/* Preamble and helpers for the autogenerated gimple-match.cc file.
   Copyright (C) 2014-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "vec-perm-indices.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "tm.h"
#include "gimple-range.h"
49 /* Forward declarations of the private auto-generated matchers.
50 They expect valueized operands in canonical order and do not
51 perform simplification of all-constant operands. */
52 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
53 code_helper
, tree
, tree
);
54 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
55 code_helper
, tree
, tree
, tree
);
56 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
57 code_helper
, tree
, tree
, tree
, tree
);
58 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
59 code_helper
, tree
, tree
, tree
, tree
, tree
);
60 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
61 code_helper
, tree
, tree
, tree
, tree
, tree
, tree
);
62 static bool gimple_resimplify1 (gimple_seq
*, gimple_match_op
*,
64 static bool gimple_resimplify2 (gimple_seq
*, gimple_match_op
*,
66 static bool gimple_resimplify3 (gimple_seq
*, gimple_match_op
*,
68 static bool gimple_resimplify4 (gimple_seq
*, gimple_match_op
*,
70 static bool gimple_resimplify5 (gimple_seq
*, gimple_match_op
*,
73 const unsigned int gimple_match_op::MAX_NUM_OPS
;
75 /* Return whether T is a constant that we'll dispatch to fold to
76 evaluate fully constant expressions. */
79 constant_for_folding (tree t
)
81 return (CONSTANT_CLASS_P (t
)
82 /* The following is only interesting to string builtins. */
83 || (TREE_CODE (t
) == ADDR_EXPR
84 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
));
87 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
88 operation. Return true on success, storing the new operation in NEW_OP. */
91 convert_conditional_op (gimple_match_op
*orig_op
,
92 gimple_match_op
*new_op
)
95 if (orig_op
->code
.is_tree_code ())
96 ifn
= get_conditional_internal_fn ((tree_code
) orig_op
->code
);
99 auto cfn
= combined_fn (orig_op
->code
);
100 if (!internal_fn_p (cfn
))
102 ifn
= get_conditional_internal_fn (as_internal_fn (cfn
));
106 unsigned int num_ops
= orig_op
->num_ops
;
107 new_op
->set_op (as_combined_fn (ifn
), orig_op
->type
, num_ops
+ 2);
108 new_op
->ops
[0] = orig_op
->cond
.cond
;
109 for (unsigned int i
= 0; i
< num_ops
; ++i
)
110 new_op
->ops
[i
+ 1] = orig_op
->ops
[i
];
111 tree else_value
= orig_op
->cond
.else_value
;
113 else_value
= targetm
.preferred_else_value (ifn
, orig_op
->type
,
114 num_ops
, orig_op
->ops
);
115 new_op
->ops
[num_ops
+ 1] = else_value
;
119 /* RES_OP is the result of a simplification. If it is conditional,
120 try to replace it with the equivalent UNCOND form, such as an
121 IFN_COND_* call or a VEC_COND_EXPR. Also try to resimplify the
122 result of the replacement if appropriate, adding any new statements to
123 SEQ and using VALUEIZE as the valueization function. Return true if
124 this resimplification occurred and resulted in at least one change. */
127 maybe_resimplify_conditional_op (gimple_seq
*seq
, gimple_match_op
*res_op
,
128 tree (*valueize
) (tree
))
130 if (!res_op
->cond
.cond
)
133 if (!res_op
->cond
.else_value
134 && res_op
->code
.is_tree_code ())
136 /* The "else" value doesn't matter. If the "then" value is a
137 gimple value, just use it unconditionally. This isn't a
138 simplification in itself, since there was no operation to
139 build in the first place. */
140 if (gimple_simplified_result_is_gimple_val (res_op
))
142 res_op
->cond
.cond
= NULL_TREE
;
146 /* Likewise if the operation would not trap. */
147 bool honor_trapv
= (INTEGRAL_TYPE_P (res_op
->type
)
148 && TYPE_OVERFLOW_TRAPS (res_op
->type
));
149 tree_code op_code
= (tree_code
) res_op
->code
;
152 /* COND_EXPR will trap if, and only if, the condition
153 traps and hence we have to check this. For all other operations, we
154 don't need to consider the operands. */
155 if (op_code
== COND_EXPR
)
156 op_could_trap
= generic_expr_could_trap_p (res_op
->ops
[0]);
158 op_could_trap
= operation_could_trap_p ((tree_code
) res_op
->code
,
159 FLOAT_TYPE_P (res_op
->type
),
161 res_op
->op_or_null (1));
165 res_op
->cond
.cond
= NULL_TREE
;
170 /* If the "then" value is a gimple value and the "else" value matters,
171 create a VEC_COND_EXPR between them, then see if it can be further
173 gimple_match_op new_op
;
174 if (res_op
->cond
.else_value
175 && VECTOR_TYPE_P (res_op
->type
)
176 && gimple_simplified_result_is_gimple_val (res_op
))
178 new_op
.set_op (VEC_COND_EXPR
, res_op
->type
,
179 res_op
->cond
.cond
, res_op
->ops
[0],
180 res_op
->cond
.else_value
);
182 return gimple_resimplify3 (seq
, res_op
, valueize
);
185 /* Otherwise try rewriting the operation as an IFN_COND_* call.
186 Again, this isn't a simplification in itself, since it's what
187 RES_OP already described. */
188 if (convert_conditional_op (res_op
, &new_op
))
194 /* Helper that matches and simplifies the toplevel result from
195 a gimple_simplify run (where we don't want to build
196 a stmt in case it's used in in-place folding). Replaces
197 RES_OP with a simplified and/or canonicalized result and
198 returns whether any change was made. */
201 gimple_resimplify1 (gimple_seq
*seq
, gimple_match_op
*res_op
,
202 tree (*valueize
)(tree
))
204 if (constant_for_folding (res_op
->ops
[0]))
206 tree tem
= NULL_TREE
;
207 if (res_op
->code
.is_tree_code ())
209 auto code
= tree_code (res_op
->code
);
210 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
211 && TREE_CODE_LENGTH (code
) == 1)
212 tem
= const_unop (code
, res_op
->type
, res_op
->ops
[0]);
215 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
218 && CONSTANT_CLASS_P (tem
))
220 if (TREE_OVERFLOW_P (tem
))
221 tem
= drop_tree_overflow (tem
);
222 res_op
->set_value (tem
);
223 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
228 /* Limit recursion, there are cases like PR80887 and others, for
229 example when value-numbering presents us with unfolded expressions
230 that we are really not prepared to handle without eventual
231 oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
232 itself as available expression. */
233 static unsigned depth
;
236 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
237 fprintf (dump_file
, "Aborting expression simplification due to "
243 gimple_match_op
res_op2 (*res_op
);
244 if (gimple_simplify (&res_op2
, seq
, valueize
,
245 res_op
->code
, res_op
->type
, res_op
->ops
[0]))
253 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
259 /* Helper that matches and simplifies the toplevel result from
260 a gimple_simplify run (where we don't want to build
261 a stmt in case it's used in in-place folding). Replaces
262 RES_OP with a simplified and/or canonicalized result and
263 returns whether any change was made. */
266 gimple_resimplify2 (gimple_seq
*seq
, gimple_match_op
*res_op
,
267 tree (*valueize
)(tree
))
269 if (constant_for_folding (res_op
->ops
[0])
270 && constant_for_folding (res_op
->ops
[1]))
272 tree tem
= NULL_TREE
;
273 if (res_op
->code
.is_tree_code ())
275 auto code
= tree_code (res_op
->code
);
276 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
277 && TREE_CODE_LENGTH (code
) == 2)
278 tem
= const_binop (code
, res_op
->type
,
279 res_op
->ops
[0], res_op
->ops
[1]);
282 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
283 res_op
->ops
[0], res_op
->ops
[1]);
285 && CONSTANT_CLASS_P (tem
))
287 if (TREE_OVERFLOW_P (tem
))
288 tem
= drop_tree_overflow (tem
);
289 res_op
->set_value (tem
);
290 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
295 /* Canonicalize operand order. */
296 bool canonicalized
= false;
298 = (res_op
->code
.is_tree_code ()
299 && TREE_CODE_CLASS (tree_code (res_op
->code
)) == tcc_comparison
);
300 if ((is_comparison
|| commutative_binary_op_p (res_op
->code
, res_op
->type
))
301 && tree_swap_operands_p (res_op
->ops
[0], res_op
->ops
[1]))
303 std::swap (res_op
->ops
[0], res_op
->ops
[1]);
305 res_op
->code
= swap_tree_comparison (tree_code (res_op
->code
));
306 canonicalized
= true;
309 /* Limit recursion, see gimple_resimplify1. */
310 static unsigned depth
;
313 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
314 fprintf (dump_file
, "Aborting expression simplification due to "
320 gimple_match_op
res_op2 (*res_op
);
321 if (gimple_simplify (&res_op2
, seq
, valueize
,
322 res_op
->code
, res_op
->type
,
323 res_op
->ops
[0], res_op
->ops
[1]))
331 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
334 return canonicalized
;
337 /* Helper that matches and simplifies the toplevel result from
338 a gimple_simplify run (where we don't want to build
339 a stmt in case it's used in in-place folding). Replaces
340 RES_OP with a simplified and/or canonicalized result and
341 returns whether any change was made. */
344 gimple_resimplify3 (gimple_seq
*seq
, gimple_match_op
*res_op
,
345 tree (*valueize
)(tree
))
347 if (constant_for_folding (res_op
->ops
[0])
348 && constant_for_folding (res_op
->ops
[1])
349 && constant_for_folding (res_op
->ops
[2]))
351 tree tem
= NULL_TREE
;
352 if (res_op
->code
.is_tree_code ())
354 auto code
= tree_code (res_op
->code
);
355 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
356 && TREE_CODE_LENGTH (code
) == 3)
357 tem
= fold_ternary
/*_to_constant*/ (code
, res_op
->type
,
358 res_op
->ops
[0], res_op
->ops
[1],
362 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
363 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]);
365 && CONSTANT_CLASS_P (tem
))
367 if (TREE_OVERFLOW_P (tem
))
368 tem
= drop_tree_overflow (tem
);
369 res_op
->set_value (tem
);
370 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
375 /* Canonicalize operand order. */
376 bool canonicalized
= false;
377 int argno
= first_commutative_argument (res_op
->code
, res_op
->type
);
379 && tree_swap_operands_p (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]))
381 std::swap (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]);
382 canonicalized
= true;
385 /* Limit recursion, see gimple_resimplify1. */
386 static unsigned depth
;
389 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
390 fprintf (dump_file
, "Aborting expression simplification due to "
396 gimple_match_op
res_op2 (*res_op
);
397 if (gimple_simplify (&res_op2
, seq
, valueize
,
398 res_op
->code
, res_op
->type
,
399 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]))
407 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
410 return canonicalized
;
413 /* Helper that matches and simplifies the toplevel result from
414 a gimple_simplify run (where we don't want to build
415 a stmt in case it's used in in-place folding). Replaces
416 RES_OP with a simplified and/or canonicalized result and
417 returns whether any change was made. */
420 gimple_resimplify4 (gimple_seq
*seq
, gimple_match_op
*res_op
,
421 tree (*valueize
)(tree
))
423 /* No constant folding is defined for four-operand functions. */
425 /* Canonicalize operand order. */
426 bool canonicalized
= false;
427 int argno
= first_commutative_argument (res_op
->code
, res_op
->type
);
429 && tree_swap_operands_p (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]))
431 std::swap (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]);
432 canonicalized
= true;
435 /* Limit recursion, see gimple_resimplify1. */
436 static unsigned depth
;
439 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
440 fprintf (dump_file
, "Aborting expression simplification due to "
446 gimple_match_op
res_op2 (*res_op
);
447 if (gimple_simplify (&res_op2
, seq
, valueize
,
448 res_op
->code
, res_op
->type
,
449 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2],
458 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
461 return canonicalized
;
464 /* Helper that matches and simplifies the toplevel result from
465 a gimple_simplify run (where we don't want to build
466 a stmt in case it's used in in-place folding). Replaces
467 RES_OP with a simplified and/or canonicalized result and
468 returns whether any change was made. */
471 gimple_resimplify5 (gimple_seq
*seq
, gimple_match_op
*res_op
,
472 tree (*valueize
)(tree
))
474 /* No constant folding is defined for five-operand functions. */
476 /* Canonicalize operand order. */
477 bool canonicalized
= false;
478 int argno
= first_commutative_argument (res_op
->code
, res_op
->type
);
480 && tree_swap_operands_p (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]))
482 std::swap (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]);
483 canonicalized
= true;
486 gimple_match_op
res_op2 (*res_op
);
487 if (gimple_simplify (&res_op2
, seq
, valueize
,
488 res_op
->code
, res_op
->type
,
489 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2],
490 res_op
->ops
[3], res_op
->ops
[4]))
496 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
499 return canonicalized
;
502 /* Match and simplify the toplevel valueized operation THIS.
503 Replaces THIS with a simplified and/or canonicalized result and
504 returns whether any change was made. */
507 gimple_match_op::resimplify (gimple_seq
*seq
, tree (*valueize
)(tree
))
512 return gimple_resimplify1 (seq
, this, valueize
);
514 return gimple_resimplify2 (seq
, this, valueize
);
516 return gimple_resimplify3 (seq
, this, valueize
);
518 return gimple_resimplify4 (seq
, this, valueize
);
520 return gimple_resimplify5 (seq
, this, valueize
);
526 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
527 build a GENERIC tree for that expression and update RES_OP accordingly. */
530 maybe_build_generic_op (gimple_match_op
*res_op
)
532 tree_code code
= (tree_code
) res_op
->code
;
538 case VIEW_CONVERT_EXPR
:
539 val
= build1 (code
, res_op
->type
, res_op
->ops
[0]);
540 res_op
->set_value (val
);
543 val
= build3 (code
, res_op
->type
, res_op
->ops
[0], res_op
->ops
[1],
545 REF_REVERSE_STORAGE_ORDER (val
) = res_op
->reverse
;
546 res_op
->set_value (val
);
552 tree (*mprts_hook
) (gimple_match_op
*);
554 /* Try to build RES_OP, which is known to be a call to FN. Return null
555 if the target doesn't support the function. */
558 build_call_internal (internal_fn fn
, gimple_match_op
*res_op
)
560 if (direct_internal_fn_p (fn
))
562 tree_pair types
= direct_internal_fn_types (fn
, res_op
->type
,
564 if (!direct_internal_fn_supported_p (fn
, types
, OPTIMIZE_FOR_BOTH
))
567 return gimple_build_call_internal (fn
, res_op
->num_ops
,
568 res_op
->op_or_null (0),
569 res_op
->op_or_null (1),
570 res_op
->op_or_null (2),
571 res_op
->op_or_null (3),
572 res_op
->op_or_null (4));
575 /* Push the exploded expression described by RES_OP as a statement to
576 SEQ if necessary and return a gimple value denoting the value of the
577 expression. If RES is not NULL then the result will be always RES
578 and even gimple values are pushed to SEQ. */
581 maybe_push_res_to_seq (gimple_match_op
*res_op
, gimple_seq
*seq
, tree res
)
583 tree
*ops
= res_op
->ops
;
584 unsigned num_ops
= res_op
->num_ops
;
586 /* The caller should have converted conditional operations into an UNCOND
587 form and resimplified as appropriate. The conditional form only
588 survives this far if that conversion failed. */
589 if (res_op
->cond
.cond
)
592 if (res_op
->code
.is_tree_code ())
595 && gimple_simplified_result_is_gimple_val (res_op
))
599 tree tem
= mprts_hook (res_op
);
608 /* Play safe and do not allow abnormals to be mentioned in
609 newly created statements. */
610 for (unsigned int i
= 0; i
< num_ops
; ++i
)
611 if (TREE_CODE (ops
[i
]) == SSA_NAME
612 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
]))
615 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
616 for (unsigned int i
= 0; i
< 2; ++i
)
617 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
618 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
)))
621 if (res_op
->code
.is_tree_code ())
623 auto code
= tree_code (res_op
->code
);
626 if (gimple_in_ssa_p (cfun
))
627 res
= make_ssa_name (res_op
->type
);
629 res
= create_tmp_reg (res_op
->type
);
631 maybe_build_generic_op (res_op
);
632 gimple
*new_stmt
= gimple_build_assign (res
, code
,
633 res_op
->op_or_null (0),
634 res_op
->op_or_null (1),
635 res_op
->op_or_null (2));
636 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
641 gcc_assert (num_ops
!= 0);
642 auto fn
= combined_fn (res_op
->code
);
643 gcall
*new_stmt
= NULL
;
644 if (internal_fn_p (fn
))
646 /* Generate the given function if we can. */
647 internal_fn ifn
= as_internal_fn (fn
);
648 new_stmt
= build_call_internal (ifn
, res_op
);
654 /* Find the function we want to call. */
655 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
659 /* We can't and should not emit calls to non-const functions. */
660 if (!(flags_from_decl_or_type (decl
) & ECF_CONST
))
663 new_stmt
= gimple_build_call (decl
, num_ops
,
664 res_op
->op_or_null (0),
665 res_op
->op_or_null (1),
666 res_op
->op_or_null (2),
667 res_op
->op_or_null (3),
668 res_op
->op_or_null (4));
672 if (gimple_in_ssa_p (cfun
))
673 res
= make_ssa_name (res_op
->type
);
675 res
= create_tmp_reg (res_op
->type
);
677 gimple_call_set_lhs (new_stmt
, res
);
678 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
684 /* Public API overloads follow for operation being tree_code or
685 built_in_function and for one to three operands or arguments.
686 They return NULL_TREE if nothing could be simplified or
687 the resulting simplified value with parts pushed to SEQ.
688 If SEQ is NULL then if the simplification needs to create
689 new stmts it will fail. If VALUEIZE is non-NULL then all
690 SSA names will be valueized using that hook prior to
691 applying simplifications. */
696 gimple_simplify (enum tree_code code
, tree type
,
698 gimple_seq
*seq
, tree (*valueize
)(tree
))
700 if (constant_for_folding (op0
))
702 tree res
= const_unop (code
, type
, op0
);
704 && CONSTANT_CLASS_P (res
))
708 gimple_match_op res_op
;
709 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
))
711 return maybe_push_res_to_seq (&res_op
, seq
);
717 gimple_simplify (enum tree_code code
, tree type
,
719 gimple_seq
*seq
, tree (*valueize
)(tree
))
721 if (constant_for_folding (op0
) && constant_for_folding (op1
))
723 tree res
= const_binop (code
, type
, op0
, op1
);
725 && CONSTANT_CLASS_P (res
))
729 /* Canonicalize operand order both for matching and fallback stmt
731 if ((commutative_tree_code (code
)
732 || TREE_CODE_CLASS (code
) == tcc_comparison
)
733 && tree_swap_operands_p (op0
, op1
))
735 std::swap (op0
, op1
);
736 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
737 code
= swap_tree_comparison (code
);
740 gimple_match_op res_op
;
741 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
))
743 return maybe_push_res_to_seq (&res_op
, seq
);
749 gimple_simplify (enum tree_code code
, tree type
,
750 tree op0
, tree op1
, tree op2
,
751 gimple_seq
*seq
, tree (*valueize
)(tree
))
753 if (constant_for_folding (op0
) && constant_for_folding (op1
)
754 && constant_for_folding (op2
))
756 tree res
= fold_ternary
/*_to_constant */ (code
, type
, op0
, op1
, op2
);
758 && CONSTANT_CLASS_P (res
))
762 /* Canonicalize operand order both for matching and fallback stmt
764 if (commutative_ternary_tree_code (code
)
765 && tree_swap_operands_p (op0
, op1
))
766 std::swap (op0
, op1
);
768 gimple_match_op res_op
;
769 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
, op2
))
771 return maybe_push_res_to_seq (&res_op
, seq
);
774 /* Builtin or internal function with one argument. */
777 gimple_simplify (combined_fn fn
, tree type
,
779 gimple_seq
*seq
, tree (*valueize
)(tree
))
781 if (constant_for_folding (arg0
))
783 tree res
= fold_const_call (fn
, type
, arg0
);
784 if (res
&& CONSTANT_CLASS_P (res
))
788 gimple_match_op res_op
;
789 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
))
791 return maybe_push_res_to_seq (&res_op
, seq
);
794 /* Builtin or internal function with two arguments. */
797 gimple_simplify (combined_fn fn
, tree type
,
798 tree arg0
, tree arg1
,
799 gimple_seq
*seq
, tree (*valueize
)(tree
))
801 if (constant_for_folding (arg0
)
802 && constant_for_folding (arg1
))
804 tree res
= fold_const_call (fn
, type
, arg0
, arg1
);
805 if (res
&& CONSTANT_CLASS_P (res
))
809 gimple_match_op res_op
;
810 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
))
812 return maybe_push_res_to_seq (&res_op
, seq
);
815 /* Builtin or internal function with three arguments. */
818 gimple_simplify (combined_fn fn
, tree type
,
819 tree arg0
, tree arg1
, tree arg2
,
820 gimple_seq
*seq
, tree (*valueize
)(tree
))
822 if (constant_for_folding (arg0
)
823 && constant_for_folding (arg1
)
824 && constant_for_folding (arg2
))
826 tree res
= fold_const_call (fn
, type
, arg0
, arg1
, arg2
);
827 if (res
&& CONSTANT_CLASS_P (res
))
831 gimple_match_op res_op
;
832 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
, arg2
))
834 return maybe_push_res_to_seq (&res_op
, seq
);
837 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
838 VALUEIZED to true if valueization changed OP. */
841 do_valueize (tree op
, tree (*valueize
)(tree
), bool &valueized
)
843 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
845 tree tem
= valueize (op
);
846 if (tem
&& tem
!= op
)
855 /* If RES_OP is a call to a conditional internal function, try simplifying
856 the associated unconditional operation and using the result to build
857 a new conditional operation. For example, if RES_OP is:
859 IFN_COND_ADD (COND, A, B, ELSE)
861 try simplifying (plus A B) and using the result to build a replacement
862 for the whole IFN_COND_ADD.
864 Return true if this approach led to a simplification, otherwise leave
865 RES_OP unchanged (and so suitable for other simplifications). When
866 returning true, add any new statements to SEQ and use VALUEIZE as the
867 valueization function.
869 RES_OP is known to be a call to IFN. */
872 try_conditional_simplification (internal_fn ifn
, gimple_match_op
*res_op
,
873 gimple_seq
*seq
, tree (*valueize
) (tree
))
876 tree_code code
= conditional_internal_fn_code (ifn
);
877 if (code
!= ERROR_MARK
)
881 ifn
= get_unconditional_internal_fn (ifn
);
884 op
= as_combined_fn (ifn
);
887 unsigned int num_ops
= res_op
->num_ops
;
888 gimple_match_op
cond_op (gimple_match_cond (res_op
->ops
[0],
889 res_op
->ops
[num_ops
- 1]),
890 op
, res_op
->type
, num_ops
- 2);
892 memcpy (cond_op
.ops
, res_op
->ops
+ 1, (num_ops
- 1) * sizeof *cond_op
.ops
);
896 if (!gimple_resimplify1 (seq
, &cond_op
, valueize
))
900 if (!gimple_resimplify2 (seq
, &cond_op
, valueize
))
904 if (!gimple_resimplify3 (seq
, &cond_op
, valueize
))
911 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
915 /* Common subroutine of gimple_extract_op and gimple_simplify. Try to
916 describe STMT in RES_OP, returning true on success. Before recording
919 - VALUEIZE_CONDITION for a COND_EXPR condition
920 - VALUEIZE_OP for every other top-level operand
922 Both routines take a tree argument and returns a tree. */
924 template<typename ValueizeOp
, typename ValueizeCondition
>
926 gimple_extract (gimple
*stmt
, gimple_match_op
*res_op
,
927 ValueizeOp valueize_op
,
928 ValueizeCondition valueize_condition
)
930 switch (gimple_code (stmt
))
934 enum tree_code code
= gimple_assign_rhs_code (stmt
);
935 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
936 switch (gimple_assign_rhs_class (stmt
))
938 case GIMPLE_SINGLE_RHS
:
939 if (code
== REALPART_EXPR
940 || code
== IMAGPART_EXPR
941 || code
== VIEW_CONVERT_EXPR
)
943 tree op0
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
944 res_op
->set_op (code
, type
, valueize_op (op0
));
947 else if (code
== BIT_FIELD_REF
)
949 tree rhs1
= gimple_assign_rhs1 (stmt
);
950 tree op0
= valueize_op (TREE_OPERAND (rhs1
, 0));
951 res_op
->set_op (code
, type
, op0
,
952 TREE_OPERAND (rhs1
, 1),
953 TREE_OPERAND (rhs1
, 2),
954 REF_REVERSE_STORAGE_ORDER (rhs1
));
957 else if (code
== SSA_NAME
)
959 tree op0
= gimple_assign_rhs1 (stmt
);
960 res_op
->set_op (TREE_CODE (op0
), type
, valueize_op (op0
));
964 case GIMPLE_UNARY_RHS
:
966 tree rhs1
= gimple_assign_rhs1 (stmt
);
967 res_op
->set_op (code
, type
, valueize_op (rhs1
));
970 case GIMPLE_BINARY_RHS
:
972 tree rhs1
= valueize_op (gimple_assign_rhs1 (stmt
));
973 tree rhs2
= valueize_op (gimple_assign_rhs2 (stmt
));
974 res_op
->set_op (code
, type
, rhs1
, rhs2
);
977 case GIMPLE_TERNARY_RHS
:
979 tree rhs1
= gimple_assign_rhs1 (stmt
);
980 if (code
== COND_EXPR
&& COMPARISON_CLASS_P (rhs1
))
981 rhs1
= valueize_condition (rhs1
);
983 rhs1
= valueize_op (rhs1
);
984 tree rhs2
= valueize_op (gimple_assign_rhs2 (stmt
));
985 tree rhs3
= valueize_op (gimple_assign_rhs3 (stmt
));
986 res_op
->set_op (code
, type
, rhs1
, rhs2
, rhs3
);
996 /* ??? This way we can't simplify calls with side-effects. */
997 if (gimple_call_lhs (stmt
) != NULL_TREE
998 && gimple_call_num_args (stmt
) >= 1
999 && gimple_call_num_args (stmt
) <= 5)
1002 if (gimple_call_internal_p (stmt
))
1003 cfn
= as_combined_fn (gimple_call_internal_fn (stmt
));
1006 tree fn
= gimple_call_fn (stmt
);
1010 fn
= valueize_op (fn
);
1011 if (TREE_CODE (fn
) != ADDR_EXPR
1012 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
1015 tree decl
= TREE_OPERAND (fn
, 0);
1016 if (DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_NORMAL
1017 || !gimple_builtin_call_types_compatible_p (stmt
, decl
))
1020 cfn
= as_combined_fn (DECL_FUNCTION_CODE (decl
));
1023 unsigned int num_args
= gimple_call_num_args (stmt
);
1024 res_op
->set_op (cfn
, TREE_TYPE (gimple_call_lhs (stmt
)), num_args
);
1025 for (unsigned i
= 0; i
< num_args
; ++i
)
1026 res_op
->ops
[i
] = valueize_op (gimple_call_arg (stmt
, i
));
1033 tree lhs
= valueize_op (gimple_cond_lhs (stmt
));
1034 tree rhs
= valueize_op (gimple_cond_rhs (stmt
));
1035 res_op
->set_op (gimple_cond_code (stmt
), boolean_type_node
, lhs
, rhs
);
1046 /* Try to describe STMT in RES_OP, returning true on success.
1047 For GIMPLE_CONDs, describe the condition that is being tested.
1048 For GIMPLE_ASSIGNs, describe the rhs of the assignment.
1049 For GIMPLE_CALLs, describe the call. */
1052 gimple_extract_op (gimple
*stmt
, gimple_match_op
*res_op
)
1054 auto nop
= [](tree op
) { return op
; };
1055 return gimple_extract (stmt
, res_op
, nop
, nop
);
1058 /* The main STMT based simplification entry. It is used by the fold_stmt
1059 and the fold_stmt_to_constant APIs. */
1062 gimple_simplify (gimple
*stmt
, gimple_match_op
*res_op
, gimple_seq
*seq
,
1063 tree (*valueize
)(tree
), tree (*top_valueize
)(tree
))
1065 bool valueized
= false;
1066 auto valueize_op
= [&](tree op
)
1068 return do_valueize (op
, top_valueize
, valueized
);
1070 auto valueize_condition
= [&](tree op
) -> tree
1072 bool cond_valueized
= false;
1073 tree lhs
= do_valueize (TREE_OPERAND (op
, 0), top_valueize
,
1075 tree rhs
= do_valueize (TREE_OPERAND (op
, 1), top_valueize
,
1077 gimple_match_op
res_op2 (res_op
->cond
, TREE_CODE (op
),
1078 TREE_TYPE (op
), lhs
, rhs
);
1079 if ((gimple_resimplify2 (seq
, &res_op2
, valueize
)
1081 && res_op2
.code
.is_tree_code ())
1083 auto code
= tree_code (res_op2
.code
);
1084 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1087 return build2 (code
, TREE_TYPE (op
),
1088 res_op2
.ops
[0], res_op2
.ops
[1]);
1090 else if (code
== SSA_NAME
1091 || code
== INTEGER_CST
1092 || code
== VECTOR_CST
)
1095 return res_op2
.ops
[0];
1098 return valueize_op (op
);
1101 if (!gimple_extract (stmt
, res_op
, valueize_op
, valueize_condition
))
1104 if (res_op
->code
.is_internal_fn ())
1106 internal_fn ifn
= internal_fn (res_op
->code
);
1107 if (try_conditional_simplification (ifn
, res_op
, seq
, valueize
))
1111 if (!res_op
->reverse
1113 && res_op
->resimplify (seq
, valueize
))
1119 /* Helper for the autogenerated code, valueize OP. */
1122 do_valueize (tree (*valueize
)(tree
), tree op
)
1124 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
1126 tree tem
= valueize (op
);
1133 /* Helper for the autogenerated code, get at the definition of NAME when
1134 VALUEIZE allows that. */
1137 get_def (tree (*valueize
)(tree
), tree name
)
1139 if (valueize
&& ! valueize (name
))
1141 return SSA_NAME_DEF_STMT (name
);
1144 /* Routine to determine if the types T1 and T2 are effectively
1145 the same for GIMPLE. If T1 or T2 is not a type, the test
1146 applies to their TREE_TYPE. */
1149 types_match (tree t1
, tree t2
)
1152 t1
= TREE_TYPE (t1
);
1154 t2
= TREE_TYPE (t2
);
1156 return types_compatible_p (t1
, t2
);
1159 /* Return if T has a single use. For GIMPLE, we also allow any
1160 non-SSA_NAME (ie constants) and zero uses to cope with uses
1161 that aren't linked up yet. */
1164 single_use (const_tree
) ATTRIBUTE_PURE
;
1167 single_use (const_tree t
)
1169 if (TREE_CODE (t
) != SSA_NAME
)
1172 /* Inline return has_zero_uses (t) || has_single_use (t); */
1173 const ssa_use_operand_t
*const head
= &(SSA_NAME_IMM_USE_NODE (t
));
1174 const ssa_use_operand_t
*ptr
;
1175 bool single
= false;
1177 for (ptr
= head
->next
; ptr
!= head
; ptr
= ptr
->next
)
1178 if (USE_STMT(ptr
) && !is_gimple_debug (USE_STMT (ptr
)))
1187 /* Return true if math operations should be canonicalized,
1188 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
1191 canonicalize_math_p ()
1193 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_opt_math
) == 0;
1196 /* Return true if math operations that are beneficial only after
1197 vectorization should be canonicalized. */
1200 canonicalize_math_after_vectorization_p ()
1202 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) != 0;
1205 /* Return true if we can still perform transformations that may introduce
1206 vector operations that are not supported by the target. Vector lowering
1207 normally handles those, but after that pass, it becomes unsafe. */
1210 optimize_vectors_before_lowering_p ()
1212 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) == 0;
1215 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
1216 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
1217 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
1218 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
1219 will likely be exact, while exp (log (arg0) * arg1) might be not.
1220 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
/* NOTE(review): this excerpt appears to have lost the return type,
   braces and several statements (the early "return true"s, the
   PLUS_EXPR/MINUS_EXPR switch and the loop's continue/assignment arms);
   only comments were added below -- verify against the full file.  */
1223 optimize_pow_to_exp (tree arg0
, tree arg1
)
/* The caller guarantees a constant base; the workaround only matters
   when that base is an exact integer.  */
1225 gcc_assert (TREE_CODE (arg0
) == REAL_CST
);
1226 if (!real_isinteger (TREE_REAL_CST_PTR (arg0
), TYPE_MODE (TREE_TYPE (arg0
))))
/* The phi_res +/- cst1 pattern requires ARG1 to be an SSA name.  */
1229 if (TREE_CODE (arg1
) != SSA_NAME
)
/* Inspect ARG1's defining statement: either directly a PHI, or an
   assignment phi_res +/- cst1 as described in the function comment.  */
1232 gimple
*def
= SSA_NAME_DEF_STMT (arg1
);
1233 gphi
*phi
= dyn_cast
<gphi
*> (def
);
1234 tree cst1
= NULL_TREE
;
1235 enum tree_code code
= ERROR_MARK
;
/* Not a PHI: require a binary assignment of the form SSA_NAME op REAL_CST.  */
1238 if (!is_gimple_assign (def
))
1240 code
= gimple_assign_rhs_code (def
);
1249 if (TREE_CODE (gimple_assign_rhs1 (def
)) != SSA_NAME
1250 || TREE_CODE (gimple_assign_rhs2 (def
)) != REAL_CST
)
/* Remember cst1, then chase rhs1 back to the PHI (phi_res).  */
1253 cst1
= gimple_assign_rhs2 (def
);
1255 phi
= dyn_cast
<gphi
*> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def
)));
/* Scan the PHI arguments for a unique REAL_CST cst2; two different
   constants mean the workaround pattern does not apply.  */
1260 tree cst2
= NULL_TREE
;
1261 int n
= gimple_phi_num_args (phi
);
1262 for (int i
= 0; i
< n
; i
++)
1264 tree arg
= PHI_ARG_DEF (phi
, i
);
1265 if (TREE_CODE (arg
) != REAL_CST
)
1267 else if (cst2
== NULL_TREE
)
1269 else if (!operand_equal_p (cst2
, arg
, 0))
/* When both constants were found, fold cst2 +/- cst1.  */
1274 cst2
= const_binop (code
, TREE_TYPE (cst2
), cst2
, cst1
);
/* If the folded constant is an exact integer, pow (arg0, arg1) is
   likely exact while exp (log (arg0) * arg1) might not be -- so the
   transformation should be suppressed in that case.  */
1276 && TREE_CODE (cst2
) == REAL_CST
1277 && real_isinteger (TREE_REAL_CST_PTR (cst2
),
1278 TYPE_MODE (TREE_TYPE (cst2
))))
1283 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1284 is another division can be optimized. Don't optimize if INNER_DIV
1285 is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
/* NOTE(review): the surrounding braces and "return" statements appear
   to be missing from this excerpt; comments only, code untouched.  */
1288 optimize_successive_divisions_p (tree divisor
, tree inner_div
)
/* Immediate-use information is only available in SSA form.  */
1290 if (!gimple_in_ssa_p (cfun
))
/* Walk every use of INNER_DIV, looking for INNER_DIV % DIVISOR --
   such a use would be broken by combining the two divisions.  */
1293 imm_use_iterator imm_iter
;
1294 use_operand_p use_p
;
1295 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, inner_div
)
1297 gimple
*use_stmt
= USE_STMT (use_p
);
/* A use that is not "x = INNER_DIV % DIVISOR" is harmless.  */
1298 if (!is_gimple_assign (use_stmt
)
1299 || gimple_assign_rhs_code (use_stmt
) != TRUNC_MOD_EXPR
1300 || !operand_equal_p (gimple_assign_rhs2 (use_stmt
), divisor
, 0))
1307 /* Return a canonical form for CODE when operating on TYPE. The idea
1308 is to remove redundant ways of representing the same operation so
1309 that code_helpers can be hashed and compared for equality.
1311 The only current canonicalization is to replace built-in functions
1312 with internal functions, in cases where internal-fn.def defines
1313 such an internal function.
1315 Note that the new code_helper cannot necessarily be used in place of
1316 the original code_helper. For example, the new code_helper might be
1317 an internal function that the target does not support. */
1320 canonicalize_code (code_helper code
, tree type
)
1322 if (code
.is_fn_code ())
1323 return associated_internal_fn (combined_fn (code
), type
);
1327 /* Return true if CODE is a binary operation and if CODE is commutative when
1328 operating on type TYPE. */
1331 commutative_binary_op_p (code_helper code
, tree type
)
1333 if (code
.is_tree_code ())
1334 return commutative_tree_code (tree_code (code
));
1335 auto cfn
= combined_fn (code
);
1336 return commutative_binary_fn_p (associated_internal_fn (cfn
, type
));
1339 /* Return true if CODE represents a ternary operation and if the first two
1340 operands are commutative when CODE is operating on TYPE. */
1343 commutative_ternary_op_p (code_helper code
, tree type
)
1345 if (code
.is_tree_code ())
1346 return commutative_ternary_tree_code (tree_code (code
));
1347 auto cfn
= combined_fn (code
);
1348 return commutative_ternary_fn_p (associated_internal_fn (cfn
, type
));
1351 /* If CODE is commutative in two consecutive operands, return the
1352 index of the first, otherwise return -1. */
1355 first_commutative_argument (code_helper code
, tree type
)
1357 if (code
.is_tree_code ())
1359 auto tcode
= tree_code (code
);
1360 if (commutative_tree_code (tcode
)
1361 || commutative_ternary_tree_code (tcode
))
1365 auto cfn
= combined_fn (code
);
1366 return first_commutative_argument (associated_internal_fn (cfn
, type
));
1369 /* Return true if CODE is a binary operation that is associative when
1370 operating on type TYPE. */
1373 associative_binary_op_p (code_helper code
, tree type
)
1375 if (code
.is_tree_code ())
1376 return associative_tree_code (tree_code (code
));
1377 auto cfn
= combined_fn (code
);
1378 return associative_binary_fn_p (associated_internal_fn (cfn
, type
));
1381 /* Return true if the target directly supports operation CODE on type TYPE.
1382 QUERY_TYPE acts as for optab_for_tree_code. */
/* NOTE(review): the braces, return type and the trailing argument of
   the optab_for_tree_code call appear missing from this excerpt;
   comments only, code untouched.  */
1385 directly_supported_p (code_helper code
, tree type
, optab_subtype query_type
)
/* Tree codes: ask the optab machinery whether the target has an
   instruction for this code in TYPE's mode.  */
1387 if (code
.is_tree_code ())
1389 direct_optab optab
= optab_for_tree_code (tree_code (code
), type
,
1391 return (optab
!= unknown_optab
1392 && optab_handler (optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
);
/* Function codes: QUERY_TYPE must be consistent with TYPE, since
   internal functions have no optab_vector/optab_scalar distinction.  */
1394 gcc_assert (query_type
== optab_default
1395 || (query_type
== optab_vector
&& VECTOR_TYPE_P (type
))
1396 || (query_type
== optab_scalar
&& !VECTOR_TYPE_P (type
)));
/* Map to the associated internal function and query direct support.  */
1397 internal_fn ifn
= associated_internal_fn (combined_fn (code
), type
);
1398 return (direct_internal_fn_p (ifn
)
1399 && direct_internal_fn_supported_p (ifn
, type
, OPTIMIZE_FOR_SPEED
));
1402 /* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn
1403 for a code_helper CODE operating on type TYPE. */
1406 get_conditional_internal_fn (code_helper code
, tree type
)
1408 if (code
.is_tree_code ())
1409 return get_conditional_internal_fn (tree_code (code
));
1410 auto cfn
= combined_fn (code
);
1411 return get_conditional_internal_fn (associated_internal_fn (cfn
, type
));