[Ada] Add special bypass for obsolete code pattern
[official-gcc.git] / gcc / gimple-match-head.c
blob53278168a59f5ac10ce6760f04fd42589a0792e7
1 /* Preamble and helpers for the autogenerated gimple-match.c file.
2 Copyright (C) 2014-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-fold.h"
35 #include "calls.h"
36 #include "tree-dfa.h"
37 #include "builtins.h"
38 #include "gimple-match.h"
39 #include "tree-pass.h"
40 #include "internal-fn.h"
41 #include "case-cfn-macros.h"
42 #include "gimplify.h"
43 #include "optabs-tree.h"
44 #include "tree-eh.h"
45 #include "dbgcnt.h"
47 /* Forward declarations of the private auto-generated matchers.
48 They expect valueized operands in canonical order and do not
49 perform simplification of all-constant operands. */
/* NOTE: the definitions are emitted by genmatch into the autogenerated
   gimple-match.c this file is the preamble of (see the file header).  */
50 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
51 code_helper, tree, tree);
52 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
53 code_helper, tree, tree, tree);
54 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
55 code_helper, tree, tree, tree, tree);
56 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
57 code_helper, tree, tree, tree, tree, tree);
58 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
59 code_helper, tree, tree, tree, tree, tree, tree);
/* Per-arity resimplification helpers defined below.  */
60 static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
61 tree (*)(tree));
62 static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
63 tree (*)(tree));
64 static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
65 tree (*)(tree));
66 static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
67 tree (*)(tree));
68 static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
69 tree (*)(tree));
71 const unsigned int gimple_match_op::MAX_NUM_OPS;
73 /* Return whether T is a constant that we'll dispatch to fold to
74 evaluate fully constant expressions. */
76 static inline bool
77 constant_for_folding (tree t)
79 return (CONSTANT_CLASS_P (t)
80 /* The following is only interesting to string builtins. */
81 || (TREE_CODE (t) == ADDR_EXPR
82 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
85 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
86 operation. Return true on success, storing the new operation in NEW_OP. */
88 static bool
89 convert_conditional_op (gimple_match_op *orig_op,
90 gimple_match_op *new_op)
92 internal_fn ifn;
93 if (orig_op->code.is_tree_code ())
94 ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
95 else
97 combined_fn cfn = orig_op->code;
98 if (!internal_fn_p (cfn))
99 return false;
100 ifn = get_conditional_internal_fn (as_internal_fn (cfn));
102 if (ifn == IFN_LAST)
103 return false;
104 unsigned int num_ops = orig_op->num_ops;
105 new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
106 new_op->ops[0] = orig_op->cond.cond;
107 for (unsigned int i = 0; i < num_ops; ++i)
108 new_op->ops[i + 1] = orig_op->ops[i];
109 tree else_value = orig_op->cond.else_value;
110 if (!else_value)
111 else_value = targetm.preferred_else_value (ifn, orig_op->type,
112 num_ops, orig_op->ops);
113 new_op->ops[num_ops + 1] = else_value;
114 return true;
117 /* RES_OP is the result of a simplification. If it is conditional,
118 try to replace it with the equivalent UNCOND form, such as an
119 IFN_COND_* call or a VEC_COND_EXPR. Also try to resimplify the
120 result of the replacement if appropriate, adding any new statements to
121 SEQ and using VALUEIZE as the valueization function. Return true if
122 this resimplification occurred and resulted in at least one change. */
124 static bool
125 maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
126 tree (*valueize) (tree))
128 if (!res_op->cond.cond)
129 return false;
131 if (!res_op->cond.else_value
132 && res_op->code.is_tree_code ())
134 /* The "else" value doesn't matter. If the "then" value is a
135 gimple value, just use it unconditionally. This isn't a
136 simplification in itself, since there was no operation to
137 build in the first place. */
138 if (gimple_simplified_result_is_gimple_val (res_op))
140 res_op->cond.cond = NULL_TREE;
141 return false;
144 /* Likewise if the operation would not trap. */
145 bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
146 && TYPE_OVERFLOW_TRAPS (res_op->type));
147 if (!operation_could_trap_p ((tree_code) res_op->code,
148 FLOAT_TYPE_P (res_op->type),
149 honor_trapv, res_op->op_or_null (1)))
151 res_op->cond.cond = NULL_TREE;
152 return false;
156 /* If the "then" value is a gimple value and the "else" value matters,
157 create a VEC_COND_EXPR between them, then see if it can be further
158 simplified. */
159 gimple_match_op new_op;
160 if (res_op->cond.else_value
161 && VECTOR_TYPE_P (res_op->type)
162 && gimple_simplified_result_is_gimple_val (res_op))
164 new_op.set_op (VEC_COND_EXPR, res_op->type,
165 res_op->cond.cond, res_op->ops[0],
166 res_op->cond.else_value);
167 *res_op = new_op;
168 return gimple_resimplify3 (seq, res_op, valueize);
171 /* Otherwise try rewriting the operation as an IFN_COND_* call.
172 Again, this isn't a simplification in itself, since it's what
173 RES_OP already described. */
174 if (convert_conditional_op (res_op, &new_op))
175 *res_op = new_op;
177 return false;
180 /* Helper that matches and simplifies the toplevel result from
181 a gimple_simplify run (where we don't want to build
182 a stmt in case it's used in in-place folding). Replaces
183 RES_OP with a simplified and/or canonicalized result and
184 returns whether any change was made. */
186 static bool
187 gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
188 tree (*valueize)(tree))
190 if (constant_for_folding (res_op->ops[0]))
192 tree tem = NULL_TREE;
193 if (res_op->code.is_tree_code ())
194 tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
195 else
196 tem = fold_const_call (combined_fn (res_op->code), res_op->type,
197 res_op->ops[0]);
198 if (tem != NULL_TREE
199 && CONSTANT_CLASS_P (tem))
201 if (TREE_OVERFLOW_P (tem))
202 tem = drop_tree_overflow (tem);
203 res_op->set_value (tem);
204 maybe_resimplify_conditional_op (seq, res_op, valueize);
205 return true;
209 /* Limit recursion, there are cases like PR80887 and others, for
210 example when value-numbering presents us with unfolded expressions
211 that we are really not prepared to handle without eventual
212 oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
213 itself as available expression. */
214 static unsigned depth;
215 if (depth > 10)
217 if (dump_file && (dump_flags & TDF_FOLDING))
218 fprintf (dump_file, "Aborting expression simplification due to "
219 "deep recursion\n");
220 return false;
223 ++depth;
224 gimple_match_op res_op2 (*res_op);
225 if (gimple_simplify (&res_op2, seq, valueize,
226 res_op->code, res_op->type, res_op->ops[0]))
228 --depth;
229 *res_op = res_op2;
230 return true;
232 --depth;
234 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
235 return true;
237 return false;
240 /* Helper that matches and simplifies the toplevel result from
241 a gimple_simplify run (where we don't want to build
242 a stmt in case it's used in in-place folding). Replaces
243 RES_OP with a simplified and/or canonicalized result and
244 returns whether any change was made. */
246 static bool
247 gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
248 tree (*valueize)(tree))
250 if (constant_for_folding (res_op->ops[0])
251 && constant_for_folding (res_op->ops[1]))
253 tree tem = NULL_TREE;
254 if (res_op->code.is_tree_code ())
255 tem = const_binop (res_op->code, res_op->type,
256 res_op->ops[0], res_op->ops[1]);
257 else
258 tem = fold_const_call (combined_fn (res_op->code), res_op->type,
259 res_op->ops[0], res_op->ops[1]);
260 if (tem != NULL_TREE
261 && CONSTANT_CLASS_P (tem))
263 if (TREE_OVERFLOW_P (tem))
264 tem = drop_tree_overflow (tem);
265 res_op->set_value (tem);
266 maybe_resimplify_conditional_op (seq, res_op, valueize);
267 return true;
271 /* Canonicalize operand order. */
272 bool canonicalized = false;
273 if (res_op->code.is_tree_code ()
274 && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
275 || commutative_tree_code (res_op->code))
276 && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
278 std::swap (res_op->ops[0], res_op->ops[1]);
279 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
280 res_op->code = swap_tree_comparison (res_op->code);
281 canonicalized = true;
284 /* Limit recursion, see gimple_resimplify1. */
285 static unsigned depth;
286 if (depth > 10)
288 if (dump_file && (dump_flags & TDF_FOLDING))
289 fprintf (dump_file, "Aborting expression simplification due to "
290 "deep recursion\n");
291 return false;
294 ++depth;
295 gimple_match_op res_op2 (*res_op);
296 if (gimple_simplify (&res_op2, seq, valueize,
297 res_op->code, res_op->type,
298 res_op->ops[0], res_op->ops[1]))
300 --depth;
301 *res_op = res_op2;
302 return true;
304 --depth;
306 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
307 return true;
309 return canonicalized;
312 /* Helper that matches and simplifies the toplevel result from
313 a gimple_simplify run (where we don't want to build
314 a stmt in case it's used in in-place folding). Replaces
315 RES_OP with a simplified and/or canonicalized result and
316 returns whether any change was made. */
318 static bool
319 gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
320 tree (*valueize)(tree))
322 if (constant_for_folding (res_op->ops[0])
323 && constant_for_folding (res_op->ops[1])
324 && constant_for_folding (res_op->ops[2]))
326 tree tem = NULL_TREE;
327 if (res_op->code.is_tree_code ())
328 tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
329 res_op->ops[0], res_op->ops[1],
330 res_op->ops[2]);
331 else
332 tem = fold_const_call (combined_fn (res_op->code), res_op->type,
333 res_op->ops[0], res_op->ops[1], res_op->ops[2]);
334 if (tem != NULL_TREE
335 && CONSTANT_CLASS_P (tem))
337 if (TREE_OVERFLOW_P (tem))
338 tem = drop_tree_overflow (tem);
339 res_op->set_value (tem);
340 maybe_resimplify_conditional_op (seq, res_op, valueize);
341 return true;
345 /* Canonicalize operand order. */
346 bool canonicalized = false;
347 if (res_op->code.is_tree_code ()
348 && commutative_ternary_tree_code (res_op->code)
349 && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
351 std::swap (res_op->ops[0], res_op->ops[1]);
352 canonicalized = true;
355 /* Limit recursion, see gimple_resimplify1. */
356 static unsigned depth;
357 if (depth > 10)
359 if (dump_file && (dump_flags & TDF_FOLDING))
360 fprintf (dump_file, "Aborting expression simplification due to "
361 "deep recursion\n");
362 return false;
365 ++depth;
366 gimple_match_op res_op2 (*res_op);
367 if (gimple_simplify (&res_op2, seq, valueize,
368 res_op->code, res_op->type,
369 res_op->ops[0], res_op->ops[1], res_op->ops[2]))
371 --depth;
372 *res_op = res_op2;
373 return true;
375 --depth;
377 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
378 return true;
380 return canonicalized;
383 /* Helper that matches and simplifies the toplevel result from
384 a gimple_simplify run (where we don't want to build
385 a stmt in case it's used in in-place folding). Replaces
386 RES_OP with a simplified and/or canonicalized result and
387 returns whether any change was made. */
389 static bool
390 gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
391 tree (*valueize)(tree))
393 /* No constant folding is defined for four-operand functions. */
395 /* Limit recursion, see gimple_resimplify1. */
396 static unsigned depth;
397 if (depth > 10)
399 if (dump_file && (dump_flags & TDF_FOLDING))
400 fprintf (dump_file, "Aborting expression simplification due to "
401 "deep recursion\n");
402 return false;
405 ++depth;
406 gimple_match_op res_op2 (*res_op);
407 if (gimple_simplify (&res_op2, seq, valueize,
408 res_op->code, res_op->type,
409 res_op->ops[0], res_op->ops[1], res_op->ops[2],
410 res_op->ops[3]))
412 --depth;
413 *res_op = res_op2;
414 return true;
416 --depth;
418 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
419 return true;
421 return false;
424 /* Helper that matches and simplifies the toplevel result from
425 a gimple_simplify run (where we don't want to build
426 a stmt in case it's used in in-place folding). Replaces
427 RES_OP with a simplified and/or canonicalized result and
428 returns whether any change was made. */
430 static bool
431 gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
432 tree (*valueize)(tree))
434 /* No constant folding is defined for five-operand functions. */
436 gimple_match_op res_op2 (*res_op);
437 if (gimple_simplify (&res_op2, seq, valueize,
438 res_op->code, res_op->type,
439 res_op->ops[0], res_op->ops[1], res_op->ops[2],
440 res_op->ops[3], res_op->ops[4]))
442 *res_op = res_op2;
443 return true;
446 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
447 return true;
449 return false;
452 /* Match and simplify the toplevel valueized operation THIS.
453 Replaces THIS with a simplified and/or canonicalized result and
454 returns whether any change was made. */
456 bool
457 gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
459 switch (num_ops)
461 case 1:
462 return gimple_resimplify1 (seq, this, valueize);
463 case 2:
464 return gimple_resimplify2 (seq, this, valueize);
465 case 3:
466 return gimple_resimplify3 (seq, this, valueize);
467 case 4:
468 return gimple_resimplify4 (seq, this, valueize);
469 case 5:
470 return gimple_resimplify5 (seq, this, valueize);
471 default:
472 gcc_unreachable ();
476 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
477 build a GENERIC tree for that expression and update RES_OP accordingly. */
479 void
480 maybe_build_generic_op (gimple_match_op *res_op)
482 tree_code code = (tree_code) res_op->code;
483 tree val;
484 switch (code)
486 case REALPART_EXPR:
487 case IMAGPART_EXPR:
488 case VIEW_CONVERT_EXPR:
489 val = build1 (code, res_op->type, res_op->ops[0]);
490 res_op->set_value (val);
491 break;
492 case BIT_FIELD_REF:
493 val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
494 res_op->ops[2]);
495 REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
496 res_op->set_value (val);
497 break;
498 default:;
502 tree (*mprts_hook) (gimple_match_op *);
504 /* Try to build RES_OP, which is known to be a call to FN. Return null
505 if the target doesn't support the function. */
507 static gcall *
508 build_call_internal (internal_fn fn, gimple_match_op *res_op)
510 if (direct_internal_fn_p (fn))
512 tree_pair types = direct_internal_fn_types (fn, res_op->type,
513 res_op->ops);
514 if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
515 return NULL;
517 return gimple_build_call_internal (fn, res_op->num_ops,
518 res_op->op_or_null (0),
519 res_op->op_or_null (1),
520 res_op->op_or_null (2),
521 res_op->op_or_null (3),
522 res_op->op_or_null (4));
525 /* Push the exploded expression described by RES_OP as a statement to
526 SEQ if necessary and return a gimple value denoting the value of the
527 expression. If RES is not NULL then the result will be always RES
528 and even gimple values are pushed to SEQ. */
530 tree
531 maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
533 tree *ops = res_op->ops;
534 unsigned num_ops = res_op->num_ops;
536 /* The caller should have converted conditional operations into an UNCOND
537 form and resimplified as appropriate. The conditional form only
538 survives this far if that conversion failed. */
539 if (res_op->cond.cond)
540 return NULL_TREE;
542 if (res_op->code.is_tree_code ())
544 if (!res
545 && gimple_simplified_result_is_gimple_val (res_op))
546 return ops[0];
547 if (mprts_hook)
549 tree tem = mprts_hook (res_op);
550 if (tem)
551 return tem;
555 if (!seq)
556 return NULL_TREE;
558 /* Play safe and do not allow abnormals to be mentioned in
559 newly created statements. */
560 for (unsigned int i = 0; i < num_ops; ++i)
561 if (TREE_CODE (ops[i]) == SSA_NAME
562 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
563 return NULL_TREE;
565 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
566 for (unsigned int i = 0; i < 2; ++i)
567 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
568 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
569 return NULL_TREE;
571 if (res_op->code.is_tree_code ())
573 if (!res)
575 if (gimple_in_ssa_p (cfun))
576 res = make_ssa_name (res_op->type);
577 else
578 res = create_tmp_reg (res_op->type);
580 maybe_build_generic_op (res_op);
581 gimple *new_stmt = gimple_build_assign (res, res_op->code,
582 res_op->op_or_null (0),
583 res_op->op_or_null (1),
584 res_op->op_or_null (2));
585 gimple_seq_add_stmt_without_update (seq, new_stmt);
586 return res;
588 else
590 gcc_assert (num_ops != 0);
591 combined_fn fn = res_op->code;
592 gcall *new_stmt = NULL;
593 if (internal_fn_p (fn))
595 /* Generate the given function if we can. */
596 internal_fn ifn = as_internal_fn (fn);
597 new_stmt = build_call_internal (ifn, res_op);
598 if (!new_stmt)
599 return NULL_TREE;
601 else
603 /* Find the function we want to call. */
604 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
605 if (!decl)
606 return NULL;
608 /* We can't and should not emit calls to non-const functions. */
609 if (!(flags_from_decl_or_type (decl) & ECF_CONST))
610 return NULL;
612 new_stmt = gimple_build_call (decl, num_ops,
613 res_op->op_or_null (0),
614 res_op->op_or_null (1),
615 res_op->op_or_null (2),
616 res_op->op_or_null (3),
617 res_op->op_or_null (4));
619 if (!res)
621 if (gimple_in_ssa_p (cfun))
622 res = make_ssa_name (res_op->type);
623 else
624 res = create_tmp_reg (res_op->type);
626 gimple_call_set_lhs (new_stmt, res);
627 gimple_seq_add_stmt_without_update (seq, new_stmt);
628 return res;
633 /* Public API overloads follow for operation being tree_code or
634 built_in_function and for one to three operands or arguments.
635 They return NULL_TREE if nothing could be simplified or
636 the resulting simplified value with parts pushed to SEQ.
637 If SEQ is NULL then if the simplification needs to create
638 new stmts it will fail. If VALUEIZE is non-NULL then all
639 SSA names will be valueized using that hook prior to
640 applying simplifications. */
642 /* Unary ops. */
644 tree
645 gimple_simplify (enum tree_code code, tree type,
646 tree op0,
647 gimple_seq *seq, tree (*valueize)(tree))
649 if (constant_for_folding (op0))
651 tree res = const_unop (code, type, op0);
652 if (res != NULL_TREE
653 && CONSTANT_CLASS_P (res))
654 return res;
657 gimple_match_op res_op;
658 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
659 return NULL_TREE;
660 return maybe_push_res_to_seq (&res_op, seq);
663 /* Binary ops. */
665 tree
666 gimple_simplify (enum tree_code code, tree type,
667 tree op0, tree op1,
668 gimple_seq *seq, tree (*valueize)(tree))
670 if (constant_for_folding (op0) && constant_for_folding (op1))
672 tree res = const_binop (code, type, op0, op1);
673 if (res != NULL_TREE
674 && CONSTANT_CLASS_P (res))
675 return res;
678 /* Canonicalize operand order both for matching and fallback stmt
679 generation. */
680 if ((commutative_tree_code (code)
681 || TREE_CODE_CLASS (code) == tcc_comparison)
682 && tree_swap_operands_p (op0, op1))
684 std::swap (op0, op1);
685 if (TREE_CODE_CLASS (code) == tcc_comparison)
686 code = swap_tree_comparison (code);
689 gimple_match_op res_op;
690 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
691 return NULL_TREE;
692 return maybe_push_res_to_seq (&res_op, seq);
695 /* Ternary ops. */
697 tree
698 gimple_simplify (enum tree_code code, tree type,
699 tree op0, tree op1, tree op2,
700 gimple_seq *seq, tree (*valueize)(tree))
702 if (constant_for_folding (op0) && constant_for_folding (op1)
703 && constant_for_folding (op2))
705 tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
706 if (res != NULL_TREE
707 && CONSTANT_CLASS_P (res))
708 return res;
711 /* Canonicalize operand order both for matching and fallback stmt
712 generation. */
713 if (commutative_ternary_tree_code (code)
714 && tree_swap_operands_p (op0, op1))
715 std::swap (op0, op1);
717 gimple_match_op res_op;
718 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
719 return NULL_TREE;
720 return maybe_push_res_to_seq (&res_op, seq);
723 /* Builtin or internal function with one argument. */
725 tree
726 gimple_simplify (combined_fn fn, tree type,
727 tree arg0,
728 gimple_seq *seq, tree (*valueize)(tree))
730 if (constant_for_folding (arg0))
732 tree res = fold_const_call (fn, type, arg0);
733 if (res && CONSTANT_CLASS_P (res))
734 return res;
737 gimple_match_op res_op;
738 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
739 return NULL_TREE;
740 return maybe_push_res_to_seq (&res_op, seq);
743 /* Builtin or internal function with two arguments. */
745 tree
746 gimple_simplify (combined_fn fn, tree type,
747 tree arg0, tree arg1,
748 gimple_seq *seq, tree (*valueize)(tree))
750 if (constant_for_folding (arg0)
751 && constant_for_folding (arg1))
753 tree res = fold_const_call (fn, type, arg0, arg1);
754 if (res && CONSTANT_CLASS_P (res))
755 return res;
758 gimple_match_op res_op;
759 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
760 return NULL_TREE;
761 return maybe_push_res_to_seq (&res_op, seq);
764 /* Builtin or internal function with three arguments. */
766 tree
767 gimple_simplify (combined_fn fn, tree type,
768 tree arg0, tree arg1, tree arg2,
769 gimple_seq *seq, tree (*valueize)(tree))
771 if (constant_for_folding (arg0)
772 && constant_for_folding (arg1)
773 && constant_for_folding (arg2))
775 tree res = fold_const_call (fn, type, arg0, arg1, arg2);
776 if (res && CONSTANT_CLASS_P (res))
777 return res;
780 gimple_match_op res_op;
781 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
782 return NULL_TREE;
783 return maybe_push_res_to_seq (&res_op, seq);
786 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
787 VALUEIZED to true if valueization changed OP. */
789 static inline tree
790 do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
792 if (valueize && TREE_CODE (op) == SSA_NAME)
794 tree tem = valueize (op);
795 if (tem && tem != op)
797 op = tem;
798 valueized = true;
801 return op;
804 /* If RES_OP is a call to a conditional internal function, try simplifying
805 the associated unconditional operation and using the result to build
806 a new conditional operation. For example, if RES_OP is:
808 IFN_COND_ADD (COND, A, B, ELSE)
810 try simplifying (plus A B) and using the result to build a replacement
811 for the whole IFN_COND_ADD.
813 Return true if this approach led to a simplification, otherwise leave
814 RES_OP unchanged (and so suitable for other simplifications). When
815 returning true, add any new statements to SEQ and use VALUEIZE as the
816 valueization function.
818 RES_OP is known to be a call to IFN. */
820 static bool
821 try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
822 gimple_seq *seq, tree (*valueize) (tree))
824 code_helper op;
825 tree_code code = conditional_internal_fn_code (ifn);
826 if (code != ERROR_MARK)
827 op = code;
828 else
830 ifn = get_unconditional_internal_fn (ifn);
831 if (ifn == IFN_LAST)
832 return false;
833 op = as_combined_fn (ifn);
836 unsigned int num_ops = res_op->num_ops;
837 gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
838 res_op->ops[num_ops - 1]),
839 op, res_op->type, num_ops - 2);
840 for (unsigned int i = 1; i < num_ops - 1; ++i)
841 cond_op.ops[i - 1] = res_op->ops[i];
842 switch (num_ops - 2)
844 case 2:
845 if (!gimple_resimplify2 (seq, &cond_op, valueize))
846 return false;
847 break;
848 case 3:
849 if (!gimple_resimplify3 (seq, &cond_op, valueize))
850 return false;
851 break;
852 default:
853 gcc_unreachable ();
855 *res_op = cond_op;
856 maybe_resimplify_conditional_op (seq, res_op, valueize);
857 return true;
860 /* The main STMT based simplification entry. It is used by the fold_stmt
861 and the fold_stmt_to_constant APIs. */
863 bool
864 gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
865 tree (*valueize)(tree), tree (*top_valueize)(tree))
867 switch (gimple_code (stmt))
869 case GIMPLE_ASSIGN:
871 enum tree_code code = gimple_assign_rhs_code (stmt);
872 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
873 switch (gimple_assign_rhs_class (stmt))
875 case GIMPLE_SINGLE_RHS:
876 if (code == REALPART_EXPR
877 || code == IMAGPART_EXPR
878 || code == VIEW_CONVERT_EXPR)
880 tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
881 bool valueized = false;
882 op0 = do_valueize (op0, top_valueize, valueized);
883 res_op->set_op (code, type, op0);
884 return (gimple_resimplify1 (seq, res_op, valueize)
885 || valueized);
887 else if (code == BIT_FIELD_REF)
889 tree rhs1 = gimple_assign_rhs1 (stmt);
890 tree op0 = TREE_OPERAND (rhs1, 0);
891 bool valueized = false;
892 op0 = do_valueize (op0, top_valueize, valueized);
893 res_op->set_op (code, type, op0,
894 TREE_OPERAND (rhs1, 1),
895 TREE_OPERAND (rhs1, 2),
896 REF_REVERSE_STORAGE_ORDER (rhs1));
897 if (res_op->reverse)
898 return valueized;
899 return (gimple_resimplify3 (seq, res_op, valueize)
900 || valueized);
902 else if (code == SSA_NAME
903 && top_valueize)
905 tree op0 = gimple_assign_rhs1 (stmt);
906 tree valueized = top_valueize (op0);
907 if (!valueized || op0 == valueized)
908 return false;
909 res_op->set_op (TREE_CODE (op0), type, valueized);
910 return true;
912 break;
913 case GIMPLE_UNARY_RHS:
915 tree rhs1 = gimple_assign_rhs1 (stmt);
916 bool valueized = false;
917 rhs1 = do_valueize (rhs1, top_valueize, valueized);
918 res_op->set_op (code, type, rhs1);
919 return (gimple_resimplify1 (seq, res_op, valueize)
920 || valueized);
922 case GIMPLE_BINARY_RHS:
924 tree rhs1 = gimple_assign_rhs1 (stmt);
925 tree rhs2 = gimple_assign_rhs2 (stmt);
926 bool valueized = false;
927 rhs1 = do_valueize (rhs1, top_valueize, valueized);
928 rhs2 = do_valueize (rhs2, top_valueize, valueized);
929 res_op->set_op (code, type, rhs1, rhs2);
930 return (gimple_resimplify2 (seq, res_op, valueize)
931 || valueized);
933 case GIMPLE_TERNARY_RHS:
935 bool valueized = false;
936 tree rhs1 = gimple_assign_rhs1 (stmt);
937 /* If this is a [VEC_]COND_EXPR first try to simplify an
938 embedded GENERIC condition. */
939 if (code == COND_EXPR
940 || code == VEC_COND_EXPR)
942 if (COMPARISON_CLASS_P (rhs1))
944 tree lhs = TREE_OPERAND (rhs1, 0);
945 tree rhs = TREE_OPERAND (rhs1, 1);
946 lhs = do_valueize (lhs, top_valueize, valueized);
947 rhs = do_valueize (rhs, top_valueize, valueized);
948 gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
949 TREE_TYPE (rhs1), lhs, rhs);
950 if ((gimple_resimplify2 (seq, &res_op2, valueize)
951 || valueized)
952 && res_op2.code.is_tree_code ())
954 valueized = true;
955 if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
956 == tcc_comparison)
957 rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
958 res_op2.ops[0], res_op2.ops[1]);
959 else if (res_op2.code == SSA_NAME
960 || res_op2.code == INTEGER_CST
961 || res_op2.code == VECTOR_CST)
962 rhs1 = res_op2.ops[0];
963 else
964 valueized = false;
968 tree rhs2 = gimple_assign_rhs2 (stmt);
969 tree rhs3 = gimple_assign_rhs3 (stmt);
970 rhs1 = do_valueize (rhs1, top_valueize, valueized);
971 rhs2 = do_valueize (rhs2, top_valueize, valueized);
972 rhs3 = do_valueize (rhs3, top_valueize, valueized);
973 res_op->set_op (code, type, rhs1, rhs2, rhs3);
974 return (gimple_resimplify3 (seq, res_op, valueize)
975 || valueized);
977 default:
978 gcc_unreachable ();
980 break;
983 case GIMPLE_CALL:
984 /* ??? This way we can't simplify calls with side-effects. */
985 if (gimple_call_lhs (stmt) != NULL_TREE
986 && gimple_call_num_args (stmt) >= 1
987 && gimple_call_num_args (stmt) <= 5)
989 bool valueized = false;
990 combined_fn cfn;
991 if (gimple_call_internal_p (stmt))
992 cfn = as_combined_fn (gimple_call_internal_fn (stmt));
993 else
995 tree fn = gimple_call_fn (stmt);
996 if (!fn)
997 return false;
999 fn = do_valueize (fn, top_valueize, valueized);
1000 if (TREE_CODE (fn) != ADDR_EXPR
1001 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
1002 return false;
1004 tree decl = TREE_OPERAND (fn, 0);
1005 if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
1006 || !gimple_builtin_call_types_compatible_p (stmt, decl))
1007 return false;
1009 cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
1012 unsigned int num_args = gimple_call_num_args (stmt);
1013 res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
1014 for (unsigned i = 0; i < num_args; ++i)
1016 tree arg = gimple_call_arg (stmt, i);
1017 res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
1019 if (internal_fn_p (cfn)
1020 && try_conditional_simplification (as_internal_fn (cfn),
1021 res_op, seq, valueize))
1022 return true;
1023 switch (num_args)
1025 case 1:
1026 return (gimple_resimplify1 (seq, res_op, valueize)
1027 || valueized);
1028 case 2:
1029 return (gimple_resimplify2 (seq, res_op, valueize)
1030 || valueized);
1031 case 3:
1032 return (gimple_resimplify3 (seq, res_op, valueize)
1033 || valueized);
1034 case 4:
1035 return (gimple_resimplify4 (seq, res_op, valueize)
1036 || valueized);
1037 case 5:
1038 return (gimple_resimplify5 (seq, res_op, valueize)
1039 || valueized);
1040 default:
1041 gcc_unreachable ();
1044 break;
1046 case GIMPLE_COND:
1048 tree lhs = gimple_cond_lhs (stmt);
1049 tree rhs = gimple_cond_rhs (stmt);
1050 bool valueized = false;
1051 lhs = do_valueize (lhs, top_valueize, valueized);
1052 rhs = do_valueize (rhs, top_valueize, valueized);
1053 res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
1054 return (gimple_resimplify2 (seq, res_op, valueize)
1055 || valueized);
1058 default:
1059 break;
1062 return false;
1066 /* Helper for the autogenerated code, valueize OP. */
1068 inline tree
1069 do_valueize (tree (*valueize)(tree), tree op)
1071 if (valueize && TREE_CODE (op) == SSA_NAME)
1073 tree tem = valueize (op);
1074 if (tem)
1075 return tem;
1077 return op;
1080 /* Helper for the autogenerated code, get at the definition of NAME when
1081 VALUEIZE allows that. */
1083 inline gimple *
1084 get_def (tree (*valueize)(tree), tree name)
1086 if (valueize && ! valueize (name))
1087 return NULL;
1088 return SSA_NAME_DEF_STMT (name);
1091 /* Routine to determine if the types T1 and T2 are effectively
1092 the same for GIMPLE. If T1 or T2 is not a type, the test
1093 applies to their TREE_TYPE. */
1095 static inline bool
1096 types_match (tree t1, tree t2)
1098 if (!TYPE_P (t1))
1099 t1 = TREE_TYPE (t1);
1100 if (!TYPE_P (t2))
1101 t2 = TREE_TYPE (t2);
1103 return types_compatible_p (t1, t2);
1106 /* Return if T has a single use. For GIMPLE, we also allow any
1107 non-SSA_NAME (ie constants) and zero uses to cope with uses
1108 that aren't linked up yet. */
1110 static inline bool
1111 single_use (tree t)
1113 return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
1116 /* Return true if math operations should be canonicalized,
1117 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
1119 static inline bool
1120 canonicalize_math_p ()
1122 return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
1125 /* Return true if math operations that are beneficial only after
1126 vectorization should be canonicalized. */
1128 static inline bool
1129 canonicalize_math_after_vectorization_p ()
1131 return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
1134 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
1135 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
1136 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
1137 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
1138 will likely be exact, while exp (log (arg0) * arg1) might be not.
1139 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
1141 static bool
1142 optimize_pow_to_exp (tree arg0, tree arg1)
1144 gcc_assert (TREE_CODE (arg0) == REAL_CST);
1145 if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
1146 return true;
1148 if (TREE_CODE (arg1) != SSA_NAME)
1149 return true;
1151 gimple *def = SSA_NAME_DEF_STMT (arg1);
1152 gphi *phi = dyn_cast <gphi *> (def);
1153 tree cst1 = NULL_TREE;
1154 enum tree_code code = ERROR_MARK;
1155 if (!phi)
1157 if (!is_gimple_assign (def))
1158 return true;
1159 code = gimple_assign_rhs_code (def);
1160 switch (code)
1162 case PLUS_EXPR:
1163 case MINUS_EXPR:
1164 break;
1165 default:
1166 return true;
1168 if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
1169 || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
1170 return true;
1172 cst1 = gimple_assign_rhs2 (def);
1174 phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
1175 if (!phi)
1176 return true;
1179 tree cst2 = NULL_TREE;
1180 int n = gimple_phi_num_args (phi);
1181 for (int i = 0; i < n; i++)
1183 tree arg = PHI_ARG_DEF (phi, i);
1184 if (TREE_CODE (arg) != REAL_CST)
1185 continue;
1186 else if (cst2 == NULL_TREE)
1187 cst2 = arg;
1188 else if (!operand_equal_p (cst2, arg, 0))
1189 return true;
1192 if (cst1 && cst2)
1193 cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
1194 if (cst2
1195 && TREE_CODE (cst2) == REAL_CST
1196 && real_isinteger (TREE_REAL_CST_PTR (cst2),
1197 TYPE_MODE (TREE_TYPE (cst2))))
1198 return false;
1199 return true;
1202 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1203 is another division can be optimized. Don't optimize if INNER_DIV
1204 is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
1206 static bool
1207 optimize_successive_divisions_p (tree divisor, tree inner_div)
1209 if (!gimple_in_ssa_p (cfun))
1210 return false;
1212 imm_use_iterator imm_iter;
1213 use_operand_p use_p;
1214 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
1216 gimple *use_stmt = USE_STMT (use_p);
1217 if (!is_gimple_assign (use_stmt)
1218 || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
1219 || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
1220 continue;
1221 return false;
1223 return true;