[official-gcc.git] / gcc / gimple-match-head.c
/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "vec-perm-indices.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "tm.h"
#include "gimple-range.h"

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree, tree);
static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));

const unsigned int gimple_match_op::MAX_NUM_OPS;

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}

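/* For example (an illustrative note, not an exhaustive list):
   INTEGER_CSTs, REAL_CSTs and VECTOR_CSTs are dispatched this way,
   and so is the address of a string literal, which is what makes
   constant folding of string built-ins such as strlen possible.  */
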
/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
                        gimple_match_op *new_op)
{
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      combined_fn cfn = orig_op->code;
      if (!internal_fn_p (cfn))
        return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    return false;
  unsigned int num_ops = orig_op->num_ops;
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
                                               num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  return true;
}

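/* An illustrative example of the conversion above: a conditional match
   for (plus A B) whose gimple_match_cond carries condition COND and
   else value ELSE becomes

     IFN_COND_ADD (COND, A, B, ELSE)

   i.e. the condition is prepended as operand 0, the original operands
   shift up by one, and the else value (or the target's preferred else
   value) is appended last.  */
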
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
                                 tree (*valueize) (tree))
{
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
         gimple value, just use it unconditionally.  This isn't a
         simplification in itself, since there was no operation to
         build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
                          && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
         traps and hence we have to check this.  For all other operations, we
         don't need to consider the operands.  */
      if (op_code == COND_EXPR)
        op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
      else
        op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
                                                FLOAT_TYPE_P (res_op->type),
                                                honor_trapv,
                                                res_op->op_or_null (1));

      if (!op_could_trap)
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
                     res_op->cond.cond, res_op->ops[0],
                     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}

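/* As an illustration of the trap check above: when the else value is
   irrelevant, a conditional integer addition can be reduced to a plain
   PLUS_EXPR because it cannot trap (unless -ftrapv makes overflow
   trapping), whereas a conditional division has to keep its IFN_COND_*
   form so that the masked-off lanes cannot fault.  */
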
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          tree_code code = res_op->code;
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 1)
            tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          tree_code code = res_op->code;
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 2)
            tem = const_binop (res_op->code, res_op->type,
                               res_op->ops[0], res_op->ops[1]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
          || commutative_tree_code (res_op->code))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
        res_op->code = swap_tree_comparison (res_op->code);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

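/* To illustrate the canonicalization above (not an exhaustive rule):
   tree_swap_operands_p prefers constants as the second operand, so a
   match result of 1 + _5 is rewritten to _5 + 1, and 1 < _5 becomes
   _5 > 1 with the comparison code adjusted via swap_tree_comparison.  */
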
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          tree_code code = res_op->code;
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 3)
            tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
                                                res_op->ops[0], res_op->ops[1],
                                                res_op->ops[2]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && commutative_ternary_tree_code (res_op->code)
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for five-operand functions.  */

  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3], res_op->ops[4]))
    {
      *res_op = res_op2;
      return true;
    }

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Match and simplify the toplevel valueized operation THIS.
   Replaces THIS with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
{
  switch (num_ops)
    {
    case 1:
      return gimple_resimplify1 (seq, this, valueize);
    case 2:
      return gimple_resimplify2 (seq, this, valueize);
    case 3:
      return gimple_resimplify3 (seq, this, valueize);
    case 4:
      return gimple_resimplify4 (seq, this, valueize);
    case 5:
      return gimple_resimplify5 (seq, this, valueize);
    default:
      gcc_unreachable ();
    }
}

/* If in GIMPLE the operation described by RES_OP should be single-rhs,
   build a GENERIC tree for that expression and update RES_OP accordingly.  */

void
maybe_build_generic_op (gimple_match_op *res_op)
{
  tree_code code = (tree_code) res_op->code;
  tree val;
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      val = build1 (code, res_op->type, res_op->ops[0]);
      res_op->set_value (val);
      break;
    case BIT_FIELD_REF:
      val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
                    res_op->ops[2]);
      REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
      res_op->set_value (val);
      break;
    default:;
    }
}

tree (*mprts_hook) (gimple_match_op *);

/* Try to build RES_OP, which is known to be a call to FN.  Return null
   if the target doesn't support the function.  */

static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
                                                  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
        return NULL;
    }
  return gimple_build_call_internal (fn, res_op->num_ops,
                                     res_op->op_or_null (0),
                                     res_op->op_or_null (1),
                                     res_op->op_or_null (2),
                                     res_op->op_or_null (3),
                                     res_op->op_or_null (4));
}

/* Push the exploded expression described by RES_OP as a statement to
   SEQ if necessary and return a gimple value denoting the value of the
   expression.  If RES is not NULL then the result will be always RES
   and even gimple values are pushed to SEQ.  */

tree
maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
{
  tree *ops = res_op->ops;
  unsigned num_ops = res_op->num_ops;

  /* The caller should have converted conditional operations into an UNCOND
     form and resimplified as appropriate.  The conditional form only
     survives this far if that conversion failed.  */
  if (res_op->cond.cond)
    return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res
          && gimple_simplified_result_is_gimple_val (res_op))
        return ops[0];
      if (mprts_hook)
        {
          tree tem = mprts_hook (res_op);
          if (tem)
            return tem;
        }
    }

  if (!seq)
    return NULL_TREE;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
        && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
      return NULL_TREE;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
        return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      maybe_build_generic_op (res_op);
      gimple *new_stmt = gimple_build_assign (res, res_op->code,
                                              res_op->op_or_null (0),
                                              res_op->op_or_null (1),
                                              res_op->op_or_null (2));
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      gcc_assert (num_ops != 0);
      combined_fn fn = res_op->code;
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
        {
          /* Generate the given function if we can.  */
          internal_fn ifn = as_internal_fn (fn);
          new_stmt = build_call_internal (ifn, res_op);
          if (!new_stmt)
            return NULL_TREE;
        }
      else
        {
          /* Find the function we want to call.  */
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          if (!decl)
            return NULL;

          /* We can't and should not emit calls to non-const functions.  */
          if (!(flags_from_decl_or_type (decl) & ECF_CONST))
            return NULL;

          new_stmt = gimple_build_call (decl, num_ops,
                                        res_op->op_or_null (0),
                                        res_op->op_or_null (1),
                                        res_op->op_or_null (2),
                                        res_op->op_or_null (3),
                                        res_op->op_or_null (4));
        }
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}

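/* A minimal sketch of a typical use of maybe_push_res_to_seq (the
   iterator GSI is hypothetical; error handling omitted):

     gimple_seq stmts = NULL;
     tree val = maybe_push_res_to_seq (&res_op, &stmts);
     if (val)
       {
         gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
         ... use VAL in place of the original expression ...
       }

   With a NULL SEQ only results that are already gimple values can be
   returned; anything that would require a new statement yields
   NULL_TREE.  */
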
/* Public API overloads follow for operation being tree_code or
   built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified or
   the resulting simplified value with parts pushed to SEQ.
   If SEQ is NULL then if the simplification needs to create
   new stmts it will fail.  If VALUEIZE is non-NULL then all
   SSA names will be valueized using that hook prior to
   applying simplifications.  */

/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

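/* An illustrative (hypothetical) caller of the overload above:

     tree simplified = gimple_simplify (NEGATE_EXPR, TREE_TYPE (op0), op0,
                                        NULL, NULL);
     if (simplified)
       ... SIMPLIFIED is a gimple value, with no statements emitted ...

   Passing a gimple_seq and a valueization hook instead enables
   simplifications that need intermediate statements or that look
   through SSA definitions.  */
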
/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with one argument.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (fn, type, arg0);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with two arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (fn, type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with three arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (fn, type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}

/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
                                gimple_seq *seq, tree (*valueize) (tree))
{
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
        return false;
      op = as_combined_fn (ifn);
    }

  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
                                              res_op->ops[num_ops - 1]),
                           op, res_op->type, num_ops - 2);

  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  switch (num_ops - 2)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
        return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
        return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}

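/* Continuing the IFN_COND_ADD example from the comment above
   (illustrative only): for IFN_COND_ADD (COND, A, 0, ELSE) the
   unconditional (plus A 0) simplifies to A, a gimple value, so for a
   vector result maybe_resimplify_conditional_op rewrites the whole
   thing as VEC_COND_EXPR <COND, A, ELSE>, which may then simplify
   further.  */
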
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                res_op->set_op (code, type, op0);
                return (gimple_resimplify1 (seq, res_op, valueize)
                        || valueized);
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = TREE_OPERAND (rhs1, 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                res_op->set_op (code, type, op0,
                                TREE_OPERAND (rhs1, 1),
                                TREE_OPERAND (rhs1, 2),
                                REF_REVERSE_STORAGE_ORDER (rhs1));
                if (res_op->reverse)
                  return valueized;
                return (gimple_resimplify3 (seq, res_op, valueize)
                        || valueized);
              }
            else if (code == SSA_NAME
                     && top_valueize)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                tree valueized = top_valueize (op0);
                if (!valueized || op0 == valueized)
                  return false;
                res_op->set_op (TREE_CODE (op0), type, valueized);
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              res_op->set_op (code, type, rhs1);
              return (gimple_resimplify1 (seq, res_op, valueize)
                      || valueized);
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              res_op->set_op (code, type, rhs1, rhs2);
              return (gimple_resimplify2 (seq, res_op, valueize)
                      || valueized);
            }
          case GIMPLE_TERNARY_RHS:
            {
              bool valueized = false;
              tree rhs1 = gimple_assign_rhs1 (stmt);
              /* If this is a COND_EXPR first try to simplify an
                 embedded GENERIC condition.  */
              if (code == COND_EXPR)
                {
                  if (COMPARISON_CLASS_P (rhs1))
                    {
                      tree lhs = TREE_OPERAND (rhs1, 0);
                      tree rhs = TREE_OPERAND (rhs1, 1);
                      lhs = do_valueize (lhs, top_valueize, valueized);
                      rhs = do_valueize (rhs, top_valueize, valueized);
                      gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
                                               TREE_TYPE (rhs1), lhs, rhs);
                      if ((gimple_resimplify2 (seq, &res_op2, valueize)
                           || valueized)
                          && res_op2.code.is_tree_code ())
                        {
                          valueized = true;
                          if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
                              == tcc_comparison)
                            rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
                                           res_op2.ops[0], res_op2.ops[1]);
                          else if (res_op2.code == SSA_NAME
                                   || res_op2.code == INTEGER_CST
                                   || res_op2.code == VECTOR_CST)
                            rhs1 = res_op2.ops[0];
                          else
                            valueized = false;
                        }
                    }
                }
              tree rhs2 = gimple_assign_rhs2 (stmt);
              tree rhs3 = gimple_assign_rhs3 (stmt);
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              rhs3 = do_valueize (rhs3, top_valueize, valueized);
              res_op->set_op (code, type, rhs1, rhs2, rhs3);
              return (gimple_resimplify3 (seq, res_op, valueize)
                      || valueized);
            }
          default:
            gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 5)
        {
          bool valueized = false;
          combined_fn cfn;
          if (gimple_call_internal_p (stmt))
            cfn = as_combined_fn (gimple_call_internal_fn (stmt));
          else
            {
              tree fn = gimple_call_fn (stmt);
              if (!fn)
                return false;

              fn = do_valueize (fn, top_valueize, valueized);
              if (TREE_CODE (fn) != ADDR_EXPR
                  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
                return false;

              tree decl = TREE_OPERAND (fn, 0);
              if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
                  || !gimple_builtin_call_types_compatible_p (stmt, decl))
                return false;

              cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
            }

          unsigned int num_args = gimple_call_num_args (stmt);
          res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
          for (unsigned i = 0; i < num_args; ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
            }
          if (internal_fn_p (cfn)
              && try_conditional_simplification (as_internal_fn (cfn),
                                                 res_op, seq, valueize))
            return true;
          switch (num_args)
            {
            case 1:
              return (gimple_resimplify1 (seq, res_op, valueize)
                      || valueized);
            case 2:
              return (gimple_resimplify2 (seq, res_op, valueize)
                      || valueized);
            case 3:
              return (gimple_resimplify3 (seq, res_op, valueize)
                      || valueized);
            case 4:
              return (gimple_resimplify4 (seq, res_op, valueize)
                      || valueized);
            case 5:
              return (gimple_resimplify5 (seq, res_op, valueize)
                      || valueized);
            default:
              gcc_unreachable ();
            }
        }
      break;

    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        bool valueized = false;
        lhs = do_valueize (lhs, top_valueize, valueized);
        rhs = do_valueize (rhs, top_valueize, valueized);
        res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
        return (gimple_resimplify2 (seq, res_op, valueize)
                || valueized);
      }

    default:
      break;
    }

  return false;
}

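/* A minimal sketch of a fold_stmt-style caller of the entry point
   above (VALUEIZE stands for whatever valueization hook the pass uses;
   statement replacement is omitted):

     gimple_seq stmts = NULL;
     gimple_match_op res_op;
     if (gimple_simplify (stmt, &res_op, &stmts, VALUEIZE, VALUEIZE))
       {
         tree val = maybe_push_res_to_seq (&res_op, &stmts);
         if (val)
           ... insert STMTS before STMT and rewrite STMT to use VAL ...
       }
   */
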
/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem)
        return tem;
    }
  return op;
}

/* Helper for the autogenerated code, get at the definition of NAME when
   VALUEIZE allows that.  */

inline gimple *
get_def (tree (*valueize)(tree), tree name)
{
  if (valueize && ! valueize (name))
    return NULL;
  return SSA_NAME_DEF_STMT (name);
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}

/* Return if T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (ie constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}

/* Return true if math operations should be canonicalized,
   e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */

static inline bool
canonicalize_math_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}

/* Return true if math operations that are beneficial only after
   vectorization should be canonicalized.  */

static inline bool
canonicalize_math_after_vectorization_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
}

/* Return true if we can still perform transformations that may introduce
   vector operations that are not supported by the target.  Vector lowering
   normally handles those, but after that pass, it becomes unsafe.  */

static inline bool
optimize_vectors_before_lowering_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
}

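/* The predicates above are intended for use in match.pd conditions;
   e.g. the sqrt(sqrt(x)) -> pow(x, 0.25) canonicalization mentioned
   above is (to the best of this note's knowledge) guarded by a
   canonicalize_math_p () condition so that it only applies before the
   PROP_gimple_opt_math property is set.  */
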
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
        return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          break;
        default:
          return true;
        }
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
          || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
        return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
        return true;
    }

  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
        continue;
      else if (cst2 == NULL_TREE)
        cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
        return true;
    }

  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
                         TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}

/* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
   is another division can be optimized.  Don't optimize if INNER_DIV
   is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */

static bool
optimize_successive_divisions_p (tree divisor, tree inner_div)
{
  if (!gimple_in_ssa_p (cfun))
    return false;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (!is_gimple_assign (use_stmt)
          || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
          || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
        continue;
      return false;
    }
  return true;
}
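
/* For illustration (hypothetical GIMPLE): given

     t_1 = x_2 / y_3;
     u_4 = t_1 / z_5;

   the match.pd rule guarded by this predicate may combine the two
   divisions into a single division by y_3 * z_5, but not if t_1 % z_5
   is also computed, which is exactly what the immediate-use scan above
   checks for.  */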