/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "vec-perm-indices.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "tm.h"
#include "gimple-range.h"
/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */

static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree, tree, tree);
static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));

const unsigned int gimple_match_op::MAX_NUM_OPS;
/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
	  /* The following is only interesting to string builtins.  */
	  || (TREE_CODE (t) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}

/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
			gimple_match_op *new_op)
{
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      auto cfn = combined_fn (orig_op->code);
      if (!internal_fn_p (cfn))
	return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    return false;
  unsigned int num_ops = orig_op->num_ops;
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
					       num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  return true;
}
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
	 traps and hence we have to check this.  For all other operations, we
	 don't need to consider the operands.  */
      if (op_code == COND_EXPR)
	op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
      else
	op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
						FLOAT_TYPE_P (res_op->type),
						honor_trapv,
						res_op->op_or_null (1));

      if (!op_could_trap)
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
		     res_op->cond.cond, res_op->ops[0],
		     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 1)
	    tem = const_unop (code, res_op->type, res_op->ops[0]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 2)
	    tem = const_binop (code, res_op->type,
			       res_op->ops[0], res_op->ops[1]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  bool is_comparison
    = (res_op->code.is_tree_code ()
       && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison);
  if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      if (is_comparison)
	res_op->code = swap_tree_comparison (tree_code (res_op->code));
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 3)
	    tem = fold_ternary/*_to_constant*/ (code, res_op->type,
						res_op->ops[0], res_op->ops[1],
						res_op->ops[2]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
		       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* No constant folding is defined for five-operand functions.  */

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
		       res_op->ops[3], res_op->ops[4]))
    {
      *res_op = res_op2;
      return true;
    }

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
/* Match and simplify the toplevel valueized operation THIS.
   Replaces THIS with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
{
  switch (num_ops)
    {
    case 1:
      return gimple_resimplify1 (seq, this, valueize);
    case 2:
      return gimple_resimplify2 (seq, this, valueize);
    case 3:
      return gimple_resimplify3 (seq, this, valueize);
    case 4:
      return gimple_resimplify4 (seq, this, valueize);
    case 5:
      return gimple_resimplify5 (seq, this, valueize);
    default:
      gcc_unreachable ();
    }
}
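
/* Illustrative usage sketch only (not part of the original sources): a
   caller that already has valueized operands X and Y of type TYPE could
   drive the class directly along the lines of

     gimple_match_op op;
     op.set_op (PLUS_EXPR, type, x, y);
     if (op.resimplify (&seq, valueize))
       ... OP now holds the simplified and/or canonicalized operation ...

   where SEQ and VALUEIZE are placeholders for the caller's statement
   sequence and valueization hook; a NULL valueization hook and a NULL
   sequence pointer are both accepted.  */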
/* If in GIMPLE the operation described by RES_OP should be single-rhs,
   build a GENERIC tree for that expression and update RES_OP accordingly.  */

void
maybe_build_generic_op (gimple_match_op *res_op)
{
  tree_code code = (tree_code) res_op->code;
  tree val;
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      val = build1 (code, res_op->type, res_op->ops[0]);
      res_op->set_value (val);
      break;
    case BIT_FIELD_REF:
      val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
		    res_op->ops[2]);
      REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
      res_op->set_value (val);
      break;
    default:;
    }
}

tree (*mprts_hook) (gimple_match_op *);
/* Try to build RES_OP, which is known to be a call to FN.  Return null
   if the target doesn't support the function.  */

static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
						  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
	return NULL;
    }
  return gimple_build_call_internal (fn, res_op->num_ops,
				     res_op->op_or_null (0),
				     res_op->op_or_null (1),
				     res_op->op_or_null (2),
				     res_op->op_or_null (3),
				     res_op->op_or_null (4));
}
/* Push the exploded expression described by RES_OP as a statement to
   SEQ if necessary and return a gimple value denoting the value of the
   expression.  If RES is not NULL then the result will always be RES
   and even gimple values are pushed to SEQ.  */
tree
maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
{
  tree *ops = res_op->ops;
  unsigned num_ops = res_op->num_ops;

  /* The caller should have converted conditional operations into an UNCOND
     form and resimplified as appropriate.  The conditional form only
     survives this far if that conversion failed.  */
  if (res_op->cond.cond)
    return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res
	  && gimple_simplified_result_is_gimple_val (res_op))
	return ops[0];
      if (mprts_hook)
	{
	  tree tem = mprts_hook (res_op);
	  if (tem)
	    return tem;
	}
    }

  if (!seq)
    return NULL_TREE;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
      return NULL_TREE;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
	return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (res_op->type);
	  else
	    res = create_tmp_reg (res_op->type);
	}
      maybe_build_generic_op (res_op);
      gimple *new_stmt = gimple_build_assign (res, code,
					      res_op->op_or_null (0),
					      res_op->op_or_null (1),
					      res_op->op_or_null (2));
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      gcc_assert (num_ops != 0);
      auto fn = combined_fn (res_op->code);
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
	{
	  /* Generate the given function if we can.  */
	  internal_fn ifn = as_internal_fn (fn);
	  new_stmt = build_call_internal (ifn, res_op);
	  if (!new_stmt)
	    return NULL_TREE;
	}
      else
	{
	  /* Find the function we want to call.  */
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  if (!decl)
	    return NULL;

	  /* We can't and should not emit calls to non-const functions.  */
	  if (!(flags_from_decl_or_type (decl) & ECF_CONST))
	    return NULL;

	  new_stmt = gimple_build_call (decl, num_ops,
					res_op->op_or_null (0),
					res_op->op_or_null (1),
					res_op->op_or_null (2),
					res_op->op_or_null (3),
					res_op->op_or_null (4));
	}
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (res_op->type);
	  else
	    res = create_tmp_reg (res_op->type);
	}
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
/* Public API overloads follow, for the operation being a tree_code or a
   built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified, otherwise the
   resulting simplified value with any needed statements pushed to SEQ.
   If SEQ is NULL then a simplification that needs to create new stmts
   will fail.  If VALUEIZE is non-NULL then all SSA names will be
   valueized using that hook prior to applying simplifications.  */
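
/* Illustrative usage sketch only (not from the original sources): with
   the overloads below, folding a single expression looks roughly like

     tree val = gimple_simplify (PLUS_EXPR, type, op0, op1, &seq, valueize);
     if (val)
       ... VAL is the simplified value; any statements that had to be
	   created were appended to SEQ ...

   TYPE, OP0, OP1, SEQ and VALUEIZE are placeholders for the caller's
   data; as documented above, SEQ and VALUEIZE may be NULL.  */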
/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	code = swap_tree_comparison (code);
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1, tree op2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}
/* Builtin or internal function with one argument.  */

tree
gimple_simplify (combined_fn fn, tree type,
		 tree arg0,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (fn, type, arg0);
      if (res && CONSTANT_CLASS_P (res))
	return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with two arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
		 tree arg0, tree arg1,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (fn, type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
	return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with three arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
		 tree arg0, tree arg1, tree arg2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (fn, type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
	return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}
/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
	{
	  op = tem;
	  valueized = true;
	}
    }
  return op;
}
/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	return false;
      op = as_combined_fn (ifn);
    }

  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      res_op->ops[num_ops - 1]),
			   op, res_op->type, num_ops - 2);

  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  switch (num_ops - 2)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
	return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
/* Common subroutine of gimple_extract_op and gimple_simplify.  Try to
   describe STMT in RES_OP, returning true on success.  Before recording
   an operand, call:

   - VALUEIZE_CONDITION for a COND_EXPR condition
   - VALUEIZE_OP for every other top-level operand

   Both routines take a tree argument and return a tree.  */
template<typename ValueizeOp, typename ValueizeCondition>
inline bool
gimple_extract (gimple *stmt, gimple_match_op *res_op,
		ValueizeOp valueize_op,
		ValueizeCondition valueize_condition)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		res_op->set_op (code, type, valueize_op (op0));
		return true;
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = valueize_op (TREE_OPERAND (rhs1, 0));
		res_op->set_op (code, type, op0,
				TREE_OPERAND (rhs1, 1),
				TREE_OPERAND (rhs1, 2),
				REF_REVERSE_STORAGE_ORDER (rhs1));
		return true;
	      }
	    else if (code == SSA_NAME)
	      {
		tree op0 = gimple_assign_rhs1 (stmt);
		res_op->set_op (TREE_CODE (op0), type, valueize_op (op0));
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      res_op->set_op (code, type, valueize_op (rhs1));
	      return true;
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2);
	      return true;
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (code == COND_EXPR && COMPARISON_CLASS_P (rhs1))
		rhs1 = valueize_condition (rhs1);
	      else
		rhs1 = valueize_op (rhs1);
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree rhs3 = valueize_op (gimple_assign_rhs3 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2, rhs3);
	      return true;
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 5)
	{
	  combined_fn cfn;
	  if (gimple_call_internal_p (stmt))
	    cfn = as_combined_fn (gimple_call_internal_fn (stmt));
	  else
	    {
	      tree fn = gimple_call_fn (stmt);
	      if (!fn)
		return false;

	      fn = valueize_op (fn);
	      if (TREE_CODE (fn) != ADDR_EXPR
		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
		return false;

	      tree decl = TREE_OPERAND (fn, 0);
	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
		return false;

	      cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
	    }

	  unsigned int num_args = gimple_call_num_args (stmt);
	  res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
	  for (unsigned i = 0; i < num_args; ++i)
	    res_op->ops[i] = valueize_op (gimple_call_arg (stmt, i));
	  return true;
	}
      break;

    case GIMPLE_COND:
      {
	tree lhs = valueize_op (gimple_cond_lhs (stmt));
	tree rhs = valueize_op (gimple_cond_rhs (stmt));
	res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
	return true;
      }

    default:
      break;
    }

  return false;
}
/* Try to describe STMT in RES_OP, returning true on success.
   For GIMPLE_CONDs, describe the condition that is being tested.
   For GIMPLE_ASSIGNs, describe the rhs of the assignment.
   For GIMPLE_CALLs, describe the call.  */

bool
gimple_extract_op (gimple *stmt, gimple_match_op *res_op)
{
  auto nop = [](tree op) { return op; };
  return gimple_extract (stmt, res_op, nop, nop);
}
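
/* Illustrative usage sketch only (not from the original sources): a pass
   that wants to inspect a statement in gimple_match_op form without
   simplifying it could do something like

     gimple_match_op op;
     if (gimple_extract_op (stmt, &op)
	 && op.code.is_tree_code ()
	 && commutative_binary_op_p (op.code, op.type))
       ... op.ops[0] and op.ops[1] can be treated as interchangeable ...

   using the code_helper queries defined later in this file.  */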
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  bool valueized = false;
  auto valueize_op = [&](tree op)
    {
      return do_valueize (op, top_valueize, valueized);
    };
  auto valueize_condition = [&](tree op) -> tree
    {
      bool cond_valueized = false;
      tree lhs = do_valueize (TREE_OPERAND (op, 0), top_valueize,
			      cond_valueized);
      tree rhs = do_valueize (TREE_OPERAND (op, 1), top_valueize,
			      cond_valueized);
      gimple_match_op res_op2 (res_op->cond, TREE_CODE (op),
			       TREE_TYPE (op), lhs, rhs);
      if ((gimple_resimplify2 (seq, &res_op2, valueize)
	   || cond_valueized)
	  && res_op2.code.is_tree_code ())
	{
	  auto code = tree_code (res_op2.code);
	  if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      valueized = true;
	      return build2 (code, TREE_TYPE (op),
			     res_op2.ops[0], res_op2.ops[1]);
	    }
	  else if (code == SSA_NAME
		   || code == INTEGER_CST
		   || code == VECTOR_CST)
	    {
	      valueized = true;
	      return res_op2.ops[0];
	    }
	}
      return valueize_op (op);
    };

  if (!gimple_extract (stmt, res_op, valueize_op, valueize_condition))
    return false;

  if (res_op->code.is_internal_fn ())
    {
      internal_fn ifn = internal_fn (res_op->code);
      if (try_conditional_simplification (ifn, res_op, seq, valueize))
	return true;
    }

  if (!res_op->reverse
      && res_op->num_ops
      && res_op->resimplify (seq, valueize))
    return true;

  return valueized;
}
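
/* Illustrative usage sketch only (not from the original sources): this is
   roughly how fold_stmt-style callers in gimple-fold.c drive the entry
   point above

     gimple_match_op res_op;
     gimple_seq seq = NULL;
     if (gimple_simplify (stmt, &res_op, &seq, valueize, valueize)
	 && maybe_push_res_to_seq (&res_op, &seq))
       ... replace STMT's result with the returned value after inserting
	   SEQ in front of STMT ...

   The real replacement logic is more careful (in-place folding, EH info
   and so on) than this sketch suggests.  */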
/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem)
	return tem;
    }
  return op;
}

/* Helper for the autogenerated code, get at the definition of NAME when
   VALUEIZE allows that.  */

inline gimple *
get_def (tree (*valueize)(tree), tree name)
{
  if (valueize && ! valueize (name))
    return NULL;
  return SSA_NAME_DEF_STMT (name);
}
/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}
/* Return true if T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (ie constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}
/* Return true if math operations should be canonicalized,
   e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */

static inline bool
canonicalize_math_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}

/* Return true if math operations that are beneficial only after
   vectorization should be canonicalized.  */

static inline bool
canonicalize_math_after_vectorization_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
}

/* Return true if we can still perform transformations that may introduce
   vector operations that are not supported by the target.  Vector lowering
   normally handles those, but after that pass, it becomes unsafe.  */

static inline bool
optimize_vectors_before_lowering_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
}
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
/* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
   is another division can be optimized.  Don't optimize if INNER_DIV
   is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */

static bool
optimize_successive_divisions_p (tree divisor, tree inner_div)
{
  if (!gimple_in_ssa_p (cfun))
    return false;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (!is_gimple_assign (use_stmt)
	  || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
	continue;
      return false;
    }
  return true;
}
/* Return a canonical form for CODE when operating on TYPE.  The idea
   is to remove redundant ways of representing the same operation so
   that code_helpers can be hashed and compared for equality.

   The only current canonicalization is to replace built-in functions
   with internal functions, in cases where internal-fn.def defines
   such an internal function.

   Note that the new code_helper cannot necessarily be used in place of
   the original code_helper.  For example, the new code_helper might be
   an internal function that the target does not support.  */

code_helper
canonicalize_code (code_helper code, tree type)
{
  if (code.is_fn_code ())
    return associated_internal_fn (combined_fn (code), type);
  return code;
}
/* Return true if CODE is a binary operation and if CODE is commutative when
   operating on type TYPE.  */

bool
commutative_binary_op_p (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return commutative_tree_code (tree_code (code));
  auto cfn = combined_fn (code);
  return commutative_binary_fn_p (associated_internal_fn (cfn, type));
}

/* Return true if CODE represents a ternary operation and if the first two
   operands are commutative when CODE is operating on TYPE.  */

bool
commutative_ternary_op_p (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return commutative_ternary_tree_code (tree_code (code));
  auto cfn = combined_fn (code);
  return commutative_ternary_fn_p (associated_internal_fn (cfn, type));
}
/* If CODE is commutative in two consecutive operands, return the
   index of the first, otherwise return -1.  */

int
first_commutative_argument (code_helper code, tree type)
{
  if (code.is_tree_code ())
    {
      auto tcode = tree_code (code);
      if (commutative_tree_code (tcode)
	  || commutative_ternary_tree_code (tcode))
	return 0;
      return -1;
    }
  auto cfn = combined_fn (code);
  return first_commutative_argument (associated_internal_fn (cfn, type));
}
/* Return true if CODE is a binary operation that is associative when
   operating on type TYPE.  */

bool
associative_binary_op_p (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return associative_tree_code (tree_code (code));
  auto cfn = combined_fn (code);
  return associative_binary_fn_p (associated_internal_fn (cfn, type));
}
/* Return true if the target directly supports operation CODE on type TYPE.
   QUERY_TYPE acts as for optab_for_tree_code.  */

bool
directly_supported_p (code_helper code, tree type, optab_subtype query_type)
{
  if (code.is_tree_code ())
    {
      direct_optab optab = optab_for_tree_code (tree_code (code), type,
						query_type);
      return (optab != unknown_optab
	      && optab_handler (optab, TYPE_MODE (type)) != CODE_FOR_nothing);
    }
  gcc_assert (query_type == optab_default
	      || (query_type == optab_vector && VECTOR_TYPE_P (type))
	      || (query_type == optab_scalar && !VECTOR_TYPE_P (type)));
  internal_fn ifn = associated_internal_fn (combined_fn (code), type);
  return (direct_internal_fn_p (ifn)
	  && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED));
}
/* A wrapper around the internal-fn.c versions of get_conditional_internal_fn
   for a code_helper CODE operating on type TYPE.  */

internal_fn
get_conditional_internal_fn (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return get_conditional_internal_fn (tree_code (code));
  auto cfn = combined_fn (code);
  return get_conditional_internal_fn (associated_internal_fn (cfn, type));
}
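
/* Illustrative usage sketch only (not from the original sources): a
   vectorizer-style client could use the code_helper queries above along
   the lines of

     code_helper code = canonicalize_code (orig_code, vectype);
     if (directly_supported_p (code, vectype, optab_default))
       ... the target has a direct implementation of CODE on VECTYPE ...
     if (get_conditional_internal_fn (code, vectype) != IFN_LAST)
       ... a predicated (conditional) variant exists as well ...

   ORIG_CODE and VECTYPE are placeholders for the caller's operation and
   vector type.  */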