1 /* Preamble and helpers for the autogenerated gimple-match.cc file.
2 Copyright (C) 2014-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
39 #include "gimple-match.h"
40 #include "tree-pass.h"
41 #include "internal-fn.h"
42 #include "case-cfn-macros.h"
44 #include "optabs-tree.h"
48 #include "gimple-range.h"
49 #include "langhooks.h"
51 /* Forward declarations of the private auto-generated matchers.
52 They expect valueized operands in canonical order and do not
53 perform simplification of all-constant operands. */
54 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
55 code_helper
, tree
, tree
);
56 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
57 code_helper
, tree
, tree
, tree
);
58 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
59 code_helper
, tree
, tree
, tree
, tree
);
60 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
61 code_helper
, tree
, tree
, tree
, tree
, tree
);
62 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
63 code_helper
, tree
, tree
, tree
, tree
, tree
, tree
);
64 static bool gimple_resimplify1 (gimple_seq
*, gimple_match_op
*,
66 static bool gimple_resimplify2 (gimple_seq
*, gimple_match_op
*,
68 static bool gimple_resimplify3 (gimple_seq
*, gimple_match_op
*,
70 static bool gimple_resimplify4 (gimple_seq
*, gimple_match_op
*,
72 static bool gimple_resimplify5 (gimple_seq
*, gimple_match_op
*,
75 const unsigned int gimple_match_op::MAX_NUM_OPS
;
77 /* Return whether T is a constant that we'll dispatch to fold to
78 evaluate fully constant expressions. */
81 constant_for_folding (tree t
)
83 return (CONSTANT_CLASS_P (t
)
84 /* The following is only interesting to string builtins. */
85 || (TREE_CODE (t
) == ADDR_EXPR
86 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
));
89 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
90 operation. Return true on success, storing the new operation in NEW_OP. */
93 convert_conditional_op (gimple_match_op
*orig_op
,
94 gimple_match_op
*new_op
)
97 if (orig_op
->code
.is_tree_code ())
98 ifn
= get_conditional_internal_fn ((tree_code
) orig_op
->code
);
101 auto cfn
= combined_fn (orig_op
->code
);
102 if (!internal_fn_p (cfn
))
104 ifn
= get_conditional_internal_fn (as_internal_fn (cfn
));
108 unsigned int num_ops
= orig_op
->num_ops
;
109 new_op
->set_op (as_combined_fn (ifn
), orig_op
->type
, num_ops
+ 2);
110 new_op
->ops
[0] = orig_op
->cond
.cond
;
111 for (unsigned int i
= 0; i
< num_ops
; ++i
)
112 new_op
->ops
[i
+ 1] = orig_op
->ops
[i
];
113 tree else_value
= orig_op
->cond
.else_value
;
115 else_value
= targetm
.preferred_else_value (ifn
, orig_op
->type
,
116 num_ops
, orig_op
->ops
);
117 new_op
->ops
[num_ops
+ 1] = else_value
;
121 /* RES_OP is the result of a simplification. If it is conditional,
122 try to replace it with the equivalent UNCOND form, such as an
123 IFN_COND_* call or a VEC_COND_EXPR. Also try to resimplify the
124 result of the replacement if appropriate, adding any new statements to
125 SEQ and using VALUEIZE as the valueization function. Return true if
126 this resimplification occurred and resulted in at least one change. */
129 maybe_resimplify_conditional_op (gimple_seq
*seq
, gimple_match_op
*res_op
,
130 tree (*valueize
) (tree
))
132 if (!res_op
->cond
.cond
)
135 if (!res_op
->cond
.else_value
136 && res_op
->code
.is_tree_code ())
138 /* The "else" value doesn't matter. If the "then" value is a
139 gimple value, just use it unconditionally. This isn't a
140 simplification in itself, since there was no operation to
141 build in the first place. */
142 if (gimple_simplified_result_is_gimple_val (res_op
))
144 res_op
->cond
.cond
= NULL_TREE
;
148 /* Likewise if the operation would not trap. */
149 bool honor_trapv
= (INTEGRAL_TYPE_P (res_op
->type
)
150 && TYPE_OVERFLOW_TRAPS (res_op
->type
));
151 tree_code op_code
= (tree_code
) res_op
->code
;
154 /* COND_EXPR will trap if, and only if, the condition
155 traps and hence we have to check this. For all other operations, we
156 don't need to consider the operands. */
157 if (op_code
== COND_EXPR
)
158 op_could_trap
= generic_expr_could_trap_p (res_op
->ops
[0]);
160 op_could_trap
= operation_could_trap_p ((tree_code
) res_op
->code
,
161 FLOAT_TYPE_P (res_op
->type
),
163 res_op
->op_or_null (1));
167 res_op
->cond
.cond
= NULL_TREE
;
172 /* If the "then" value is a gimple value and the "else" value matters,
173 create a VEC_COND_EXPR between them, then see if it can be further
175 gimple_match_op new_op
;
176 if (res_op
->cond
.else_value
177 && VECTOR_TYPE_P (res_op
->type
)
178 && gimple_simplified_result_is_gimple_val (res_op
))
180 new_op
.set_op (VEC_COND_EXPR
, res_op
->type
,
181 res_op
->cond
.cond
, res_op
->ops
[0],
182 res_op
->cond
.else_value
);
184 return gimple_resimplify3 (seq
, res_op
, valueize
);
187 /* Otherwise try rewriting the operation as an IFN_COND_* call.
188 Again, this isn't a simplification in itself, since it's what
189 RES_OP already described. */
190 if (convert_conditional_op (res_op
, &new_op
))
196 /* Helper that matches and simplifies the toplevel result from
197 a gimple_simplify run (where we don't want to build
198 a stmt in case it's used in in-place folding). Replaces
199 RES_OP with a simplified and/or canonicalized result and
200 returns whether any change was made. */
203 gimple_resimplify1 (gimple_seq
*seq
, gimple_match_op
*res_op
,
204 tree (*valueize
)(tree
))
206 if (constant_for_folding (res_op
->ops
[0]))
208 tree tem
= NULL_TREE
;
209 if (res_op
->code
.is_tree_code ())
211 auto code
= tree_code (res_op
->code
);
212 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
213 && TREE_CODE_LENGTH (code
) == 1)
214 tem
= const_unop (code
, res_op
->type
, res_op
->ops
[0]);
217 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
220 && CONSTANT_CLASS_P (tem
))
222 if (TREE_OVERFLOW_P (tem
))
223 tem
= drop_tree_overflow (tem
);
224 res_op
->set_value (tem
);
225 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
230 /* Limit recursion, there are cases like PR80887 and others, for
231 example when value-numbering presents us with unfolded expressions
232 that we are really not prepared to handle without eventual
233 oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
234 itself as available expression. */
235 static unsigned depth
;
238 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
239 fprintf (dump_file
, "Aborting expression simplification due to "
245 gimple_match_op
res_op2 (*res_op
);
246 if (gimple_simplify (&res_op2
, seq
, valueize
,
247 res_op
->code
, res_op
->type
, res_op
->ops
[0]))
255 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
261 /* Helper that matches and simplifies the toplevel result from
262 a gimple_simplify run (where we don't want to build
263 a stmt in case it's used in in-place folding). Replaces
264 RES_OP with a simplified and/or canonicalized result and
265 returns whether any change was made. */
268 gimple_resimplify2 (gimple_seq
*seq
, gimple_match_op
*res_op
,
269 tree (*valueize
)(tree
))
271 if (constant_for_folding (res_op
->ops
[0])
272 && constant_for_folding (res_op
->ops
[1]))
274 tree tem
= NULL_TREE
;
275 if (res_op
->code
.is_tree_code ())
277 auto code
= tree_code (res_op
->code
);
278 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
279 && TREE_CODE_LENGTH (code
) == 2)
280 tem
= const_binop (code
, res_op
->type
,
281 res_op
->ops
[0], res_op
->ops
[1]);
284 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
285 res_op
->ops
[0], res_op
->ops
[1]);
287 && CONSTANT_CLASS_P (tem
))
289 if (TREE_OVERFLOW_P (tem
))
290 tem
= drop_tree_overflow (tem
);
291 res_op
->set_value (tem
);
292 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
297 /* Canonicalize operand order. */
298 bool canonicalized
= false;
300 = (res_op
->code
.is_tree_code ()
301 && TREE_CODE_CLASS (tree_code (res_op
->code
)) == tcc_comparison
);
302 if ((is_comparison
|| commutative_binary_op_p (res_op
->code
, res_op
->type
))
303 && tree_swap_operands_p (res_op
->ops
[0], res_op
->ops
[1]))
305 std::swap (res_op
->ops
[0], res_op
->ops
[1]);
307 res_op
->code
= swap_tree_comparison (tree_code (res_op
->code
));
308 canonicalized
= true;
311 /* Limit recursion, see gimple_resimplify1. */
312 static unsigned depth
;
315 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
316 fprintf (dump_file
, "Aborting expression simplification due to "
322 gimple_match_op
res_op2 (*res_op
);
323 if (gimple_simplify (&res_op2
, seq
, valueize
,
324 res_op
->code
, res_op
->type
,
325 res_op
->ops
[0], res_op
->ops
[1]))
333 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
336 return canonicalized
;
339 /* Helper that matches and simplifies the toplevel result from
340 a gimple_simplify run (where we don't want to build
341 a stmt in case it's used in in-place folding). Replaces
342 RES_OP with a simplified and/or canonicalized result and
343 returns whether any change was made. */
346 gimple_resimplify3 (gimple_seq
*seq
, gimple_match_op
*res_op
,
347 tree (*valueize
)(tree
))
349 if (constant_for_folding (res_op
->ops
[0])
350 && constant_for_folding (res_op
->ops
[1])
351 && constant_for_folding (res_op
->ops
[2]))
353 tree tem
= NULL_TREE
;
354 if (res_op
->code
.is_tree_code ())
356 auto code
= tree_code (res_op
->code
);
357 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
358 && TREE_CODE_LENGTH (code
) == 3)
359 tem
= fold_ternary
/*_to_constant*/ (code
, res_op
->type
,
360 res_op
->ops
[0], res_op
->ops
[1],
364 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
365 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]);
367 && CONSTANT_CLASS_P (tem
))
369 if (TREE_OVERFLOW_P (tem
))
370 tem
= drop_tree_overflow (tem
);
371 res_op
->set_value (tem
);
372 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
377 /* Canonicalize operand order. */
378 bool canonicalized
= false;
379 int argno
= first_commutative_argument (res_op
->code
, res_op
->type
);
381 && tree_swap_operands_p (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]))
383 std::swap (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]);
384 canonicalized
= true;
387 /* Limit recursion, see gimple_resimplify1. */
388 static unsigned depth
;
391 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
392 fprintf (dump_file
, "Aborting expression simplification due to "
398 gimple_match_op
res_op2 (*res_op
);
399 if (gimple_simplify (&res_op2
, seq
, valueize
,
400 res_op
->code
, res_op
->type
,
401 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]))
409 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
412 return canonicalized
;
415 /* Helper that matches and simplifies the toplevel result from
416 a gimple_simplify run (where we don't want to build
417 a stmt in case it's used in in-place folding). Replaces
418 RES_OP with a simplified and/or canonicalized result and
419 returns whether any change was made. */
422 gimple_resimplify4 (gimple_seq
*seq
, gimple_match_op
*res_op
,
423 tree (*valueize
)(tree
))
425 /* No constant folding is defined for four-operand functions. */
427 /* Canonicalize operand order. */
428 bool canonicalized
= false;
429 int argno
= first_commutative_argument (res_op
->code
, res_op
->type
);
431 && tree_swap_operands_p (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]))
433 std::swap (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]);
434 canonicalized
= true;
437 /* Limit recursion, see gimple_resimplify1. */
438 static unsigned depth
;
441 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
442 fprintf (dump_file
, "Aborting expression simplification due to "
448 gimple_match_op
res_op2 (*res_op
);
449 if (gimple_simplify (&res_op2
, seq
, valueize
,
450 res_op
->code
, res_op
->type
,
451 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2],
460 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
463 return canonicalized
;
466 /* Helper that matches and simplifies the toplevel result from
467 a gimple_simplify run (where we don't want to build
468 a stmt in case it's used in in-place folding). Replaces
469 RES_OP with a simplified and/or canonicalized result and
470 returns whether any change was made. */
473 gimple_resimplify5 (gimple_seq
*seq
, gimple_match_op
*res_op
,
474 tree (*valueize
)(tree
))
476 /* No constant folding is defined for five-operand functions. */
478 /* Canonicalize operand order. */
479 bool canonicalized
= false;
480 int argno
= first_commutative_argument (res_op
->code
, res_op
->type
);
482 && tree_swap_operands_p (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]))
484 std::swap (res_op
->ops
[argno
], res_op
->ops
[argno
+ 1]);
485 canonicalized
= true;
488 gimple_match_op
res_op2 (*res_op
);
489 if (gimple_simplify (&res_op2
, seq
, valueize
,
490 res_op
->code
, res_op
->type
,
491 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2],
492 res_op
->ops
[3], res_op
->ops
[4]))
498 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
501 return canonicalized
;
504 /* Match and simplify the toplevel valueized operation THIS.
505 Replaces THIS with a simplified and/or canonicalized result and
506 returns whether any change was made. */
509 gimple_match_op::resimplify (gimple_seq
*seq
, tree (*valueize
)(tree
))
514 return gimple_resimplify1 (seq
, this, valueize
);
516 return gimple_resimplify2 (seq
, this, valueize
);
518 return gimple_resimplify3 (seq
, this, valueize
);
520 return gimple_resimplify4 (seq
, this, valueize
);
522 return gimple_resimplify5 (seq
, this, valueize
);
528 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
529 build a GENERIC tree for that expression and update RES_OP accordingly. */
532 maybe_build_generic_op (gimple_match_op
*res_op
)
534 tree_code code
= (tree_code
) res_op
->code
;
540 case VIEW_CONVERT_EXPR
:
541 val
= build1 (code
, res_op
->type
, res_op
->ops
[0]);
542 res_op
->set_value (val
);
545 val
= build3 (code
, res_op
->type
, res_op
->ops
[0], res_op
->ops
[1],
547 REF_REVERSE_STORAGE_ORDER (val
) = res_op
->reverse
;
548 res_op
->set_value (val
);
554 tree (*mprts_hook
) (gimple_match_op
*);
556 /* Try to build RES_OP, which is known to be a call to FN. Return null
557 if the target doesn't support the function. */
560 build_call_internal (internal_fn fn
, gimple_match_op
*res_op
)
562 if (direct_internal_fn_p (fn
))
564 tree_pair types
= direct_internal_fn_types (fn
, res_op
->type
,
566 if (!direct_internal_fn_supported_p (fn
, types
, OPTIMIZE_FOR_BOTH
))
569 return gimple_build_call_internal (fn
, res_op
->num_ops
,
570 res_op
->op_or_null (0),
571 res_op
->op_or_null (1),
572 res_op
->op_or_null (2),
573 res_op
->op_or_null (3),
574 res_op
->op_or_null (4));
577 /* Push the exploded expression described by RES_OP as a statement to
578 SEQ if necessary and return a gimple value denoting the value of the
579 expression. If RES is not NULL then the result will be always RES
580 and even gimple values are pushed to SEQ. */
583 maybe_push_res_to_seq (gimple_match_op
*res_op
, gimple_seq
*seq
, tree res
)
585 tree
*ops
= res_op
->ops
;
586 unsigned num_ops
= res_op
->num_ops
;
588 /* The caller should have converted conditional operations into an UNCOND
589 form and resimplified as appropriate. The conditional form only
590 survives this far if that conversion failed. */
591 if (res_op
->cond
.cond
)
594 if (res_op
->code
.is_tree_code ())
597 && gimple_simplified_result_is_gimple_val (res_op
))
601 tree tem
= mprts_hook (res_op
);
610 /* Play safe and do not allow abnormals to be mentioned in
611 newly created statements. */
612 for (unsigned int i
= 0; i
< num_ops
; ++i
)
613 if (TREE_CODE (ops
[i
]) == SSA_NAME
614 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
]))
617 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
618 for (unsigned int i
= 0; i
< 2; ++i
)
619 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
620 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
)))
623 if (res_op
->code
.is_tree_code ())
625 auto code
= tree_code (res_op
->code
);
628 if (gimple_in_ssa_p (cfun
))
629 res
= make_ssa_name (res_op
->type
);
631 res
= create_tmp_reg (res_op
->type
);
633 maybe_build_generic_op (res_op
);
634 gimple
*new_stmt
= gimple_build_assign (res
, code
,
635 res_op
->op_or_null (0),
636 res_op
->op_or_null (1),
637 res_op
->op_or_null (2));
638 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
643 gcc_assert (num_ops
!= 0);
644 auto fn
= combined_fn (res_op
->code
);
645 gcall
*new_stmt
= NULL
;
646 if (internal_fn_p (fn
))
648 /* Generate the given function if we can. */
649 internal_fn ifn
= as_internal_fn (fn
);
650 new_stmt
= build_call_internal (ifn
, res_op
);
656 /* Find the function we want to call. */
657 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
661 /* We can't and should not emit calls to non-const functions. */
662 if (!(flags_from_decl_or_type (decl
) & ECF_CONST
))
665 new_stmt
= gimple_build_call (decl
, num_ops
,
666 res_op
->op_or_null (0),
667 res_op
->op_or_null (1),
668 res_op
->op_or_null (2),
669 res_op
->op_or_null (3),
670 res_op
->op_or_null (4));
674 if (gimple_in_ssa_p (cfun
))
675 res
= make_ssa_name (res_op
->type
);
677 res
= create_tmp_reg (res_op
->type
);
679 gimple_call_set_lhs (new_stmt
, res
);
680 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
686 /* Public API overloads follow for operation being tree_code or
687 built_in_function and for one to three operands or arguments.
688 They return NULL_TREE if nothing could be simplified or
689 the resulting simplified value with parts pushed to SEQ.
690 If SEQ is NULL then if the simplification needs to create
691 new stmts it will fail. If VALUEIZE is non-NULL then all
692 SSA names will be valueized using that hook prior to
693 applying simplifications. */
698 gimple_simplify (enum tree_code code
, tree type
,
700 gimple_seq
*seq
, tree (*valueize
)(tree
))
702 if (constant_for_folding (op0
))
704 tree res
= const_unop (code
, type
, op0
);
706 && CONSTANT_CLASS_P (res
))
710 gimple_match_op res_op
;
711 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
))
713 return maybe_push_res_to_seq (&res_op
, seq
);
719 gimple_simplify (enum tree_code code
, tree type
,
721 gimple_seq
*seq
, tree (*valueize
)(tree
))
723 if (constant_for_folding (op0
) && constant_for_folding (op1
))
725 tree res
= const_binop (code
, type
, op0
, op1
);
727 && CONSTANT_CLASS_P (res
))
731 /* Canonicalize operand order both for matching and fallback stmt
733 if ((commutative_tree_code (code
)
734 || TREE_CODE_CLASS (code
) == tcc_comparison
)
735 && tree_swap_operands_p (op0
, op1
))
737 std::swap (op0
, op1
);
738 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
739 code
= swap_tree_comparison (code
);
742 gimple_match_op res_op
;
743 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
))
745 return maybe_push_res_to_seq (&res_op
, seq
);
751 gimple_simplify (enum tree_code code
, tree type
,
752 tree op0
, tree op1
, tree op2
,
753 gimple_seq
*seq
, tree (*valueize
)(tree
))
755 if (constant_for_folding (op0
) && constant_for_folding (op1
)
756 && constant_for_folding (op2
))
758 tree res
= fold_ternary
/*_to_constant */ (code
, type
, op0
, op1
, op2
);
760 && CONSTANT_CLASS_P (res
))
764 /* Canonicalize operand order both for matching and fallback stmt
766 if (commutative_ternary_tree_code (code
)
767 && tree_swap_operands_p (op0
, op1
))
768 std::swap (op0
, op1
);
770 gimple_match_op res_op
;
771 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
, op2
))
773 return maybe_push_res_to_seq (&res_op
, seq
);
776 /* Builtin or internal function with one argument. */
779 gimple_simplify (combined_fn fn
, tree type
,
781 gimple_seq
*seq
, tree (*valueize
)(tree
))
783 if (constant_for_folding (arg0
))
785 tree res
= fold_const_call (fn
, type
, arg0
);
786 if (res
&& CONSTANT_CLASS_P (res
))
790 gimple_match_op res_op
;
791 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
))
793 return maybe_push_res_to_seq (&res_op
, seq
);
796 /* Builtin or internal function with two arguments. */
799 gimple_simplify (combined_fn fn
, tree type
,
800 tree arg0
, tree arg1
,
801 gimple_seq
*seq
, tree (*valueize
)(tree
))
803 if (constant_for_folding (arg0
)
804 && constant_for_folding (arg1
))
806 tree res
= fold_const_call (fn
, type
, arg0
, arg1
);
807 if (res
&& CONSTANT_CLASS_P (res
))
811 gimple_match_op res_op
;
812 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
))
814 return maybe_push_res_to_seq (&res_op
, seq
);
817 /* Builtin or internal function with three arguments. */
820 gimple_simplify (combined_fn fn
, tree type
,
821 tree arg0
, tree arg1
, tree arg2
,
822 gimple_seq
*seq
, tree (*valueize
)(tree
))
824 if (constant_for_folding (arg0
)
825 && constant_for_folding (arg1
)
826 && constant_for_folding (arg2
))
828 tree res
= fold_const_call (fn
, type
, arg0
, arg1
, arg2
);
829 if (res
&& CONSTANT_CLASS_P (res
))
833 gimple_match_op res_op
;
834 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
, arg2
))
836 return maybe_push_res_to_seq (&res_op
, seq
);
839 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
840 VALUEIZED to true if valueization changed OP. */
843 do_valueize (tree op
, tree (*valueize
)(tree
), bool &valueized
)
845 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
847 tree tem
= valueize (op
);
848 if (tem
&& tem
!= op
)
857 /* If RES_OP is a call to a conditional internal function, try simplifying
858 the associated unconditional operation and using the result to build
859 a new conditional operation. For example, if RES_OP is:
861 IFN_COND_ADD (COND, A, B, ELSE)
863 try simplifying (plus A B) and using the result to build a replacement
864 for the whole IFN_COND_ADD.
866 Return true if this approach led to a simplification, otherwise leave
867 RES_OP unchanged (and so suitable for other simplifications). When
868 returning true, add any new statements to SEQ and use VALUEIZE as the
869 valueization function.
871 RES_OP is known to be a call to IFN. */
874 try_conditional_simplification (internal_fn ifn
, gimple_match_op
*res_op
,
875 gimple_seq
*seq
, tree (*valueize
) (tree
))
878 tree_code code
= conditional_internal_fn_code (ifn
);
879 if (code
!= ERROR_MARK
)
883 ifn
= get_unconditional_internal_fn (ifn
);
886 op
= as_combined_fn (ifn
);
889 unsigned int num_ops
= res_op
->num_ops
;
890 gimple_match_op
cond_op (gimple_match_cond (res_op
->ops
[0],
891 res_op
->ops
[num_ops
- 1]),
892 op
, res_op
->type
, num_ops
- 2);
894 memcpy (cond_op
.ops
, res_op
->ops
+ 1, (num_ops
- 1) * sizeof *cond_op
.ops
);
898 if (!gimple_resimplify1 (seq
, &cond_op
, valueize
))
902 if (!gimple_resimplify2 (seq
, &cond_op
, valueize
))
906 if (!gimple_resimplify3 (seq
, &cond_op
, valueize
))
913 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
917 /* Common subroutine of gimple_extract_op and gimple_simplify. Try to
918 describe STMT in RES_OP, returning true on success. Before recording
921 - VALUEIZE_CONDITION for a COND_EXPR condition
922 - VALUEIZE_OP for every other top-level operand
924 Both routines take a tree argument and returns a tree. */
926 template<typename ValueizeOp
, typename ValueizeCondition
>
928 gimple_extract (gimple
*stmt
, gimple_match_op
*res_op
,
929 ValueizeOp valueize_op
,
930 ValueizeCondition valueize_condition
)
932 switch (gimple_code (stmt
))
936 enum tree_code code
= gimple_assign_rhs_code (stmt
);
937 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
938 switch (gimple_assign_rhs_class (stmt
))
940 case GIMPLE_SINGLE_RHS
:
941 if (code
== REALPART_EXPR
942 || code
== IMAGPART_EXPR
943 || code
== VIEW_CONVERT_EXPR
)
945 tree op0
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
946 res_op
->set_op (code
, type
, valueize_op (op0
));
949 else if (code
== BIT_FIELD_REF
)
951 tree rhs1
= gimple_assign_rhs1 (stmt
);
952 tree op0
= valueize_op (TREE_OPERAND (rhs1
, 0));
953 res_op
->set_op (code
, type
, op0
,
954 TREE_OPERAND (rhs1
, 1),
955 TREE_OPERAND (rhs1
, 2),
956 REF_REVERSE_STORAGE_ORDER (rhs1
));
959 else if (code
== SSA_NAME
)
961 tree op0
= gimple_assign_rhs1 (stmt
);
962 res_op
->set_op (TREE_CODE (op0
), type
, valueize_op (op0
));
966 case GIMPLE_UNARY_RHS
:
968 tree rhs1
= gimple_assign_rhs1 (stmt
);
969 res_op
->set_op (code
, type
, valueize_op (rhs1
));
972 case GIMPLE_BINARY_RHS
:
974 tree rhs1
= valueize_op (gimple_assign_rhs1 (stmt
));
975 tree rhs2
= valueize_op (gimple_assign_rhs2 (stmt
));
976 res_op
->set_op (code
, type
, rhs1
, rhs2
);
979 case GIMPLE_TERNARY_RHS
:
981 tree rhs1
= gimple_assign_rhs1 (stmt
);
982 if (code
== COND_EXPR
&& COMPARISON_CLASS_P (rhs1
))
983 rhs1
= valueize_condition (rhs1
);
985 rhs1
= valueize_op (rhs1
);
986 tree rhs2
= valueize_op (gimple_assign_rhs2 (stmt
));
987 tree rhs3
= valueize_op (gimple_assign_rhs3 (stmt
));
988 res_op
->set_op (code
, type
, rhs1
, rhs2
, rhs3
);
998 /* ??? This way we can't simplify calls with side-effects. */
999 if (gimple_call_lhs (stmt
) != NULL_TREE
1000 && gimple_call_num_args (stmt
) >= 1
1001 && gimple_call_num_args (stmt
) <= 5)
1004 if (gimple_call_internal_p (stmt
))
1005 cfn
= as_combined_fn (gimple_call_internal_fn (stmt
));
1008 tree fn
= gimple_call_fn (stmt
);
1012 fn
= valueize_op (fn
);
1013 if (TREE_CODE (fn
) != ADDR_EXPR
1014 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
1017 tree decl
= TREE_OPERAND (fn
, 0);
1018 if (DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_NORMAL
1019 || !gimple_builtin_call_types_compatible_p (stmt
, decl
))
1022 cfn
= as_combined_fn (DECL_FUNCTION_CODE (decl
));
1025 unsigned int num_args
= gimple_call_num_args (stmt
);
1026 res_op
->set_op (cfn
, TREE_TYPE (gimple_call_lhs (stmt
)), num_args
);
1027 for (unsigned i
= 0; i
< num_args
; ++i
)
1028 res_op
->ops
[i
] = valueize_op (gimple_call_arg (stmt
, i
));
1035 tree lhs
= valueize_op (gimple_cond_lhs (stmt
));
1036 tree rhs
= valueize_op (gimple_cond_rhs (stmt
));
1037 res_op
->set_op (gimple_cond_code (stmt
), boolean_type_node
, lhs
, rhs
);
1048 /* Try to describe STMT in RES_OP, returning true on success.
1049 For GIMPLE_CONDs, describe the condition that is being tested.
1050 For GIMPLE_ASSIGNs, describe the rhs of the assignment.
1051 For GIMPLE_CALLs, describe the call. */
1054 gimple_extract_op (gimple
*stmt
, gimple_match_op
*res_op
)
1056 auto nop
= [](tree op
) { return op
; };
1057 return gimple_extract (stmt
, res_op
, nop
, nop
);
1060 /* The main STMT based simplification entry. It is used by the fold_stmt
1061 and the fold_stmt_to_constant APIs. */
1064 gimple_simplify (gimple
*stmt
, gimple_match_op
*res_op
, gimple_seq
*seq
,
1065 tree (*valueize
)(tree
), tree (*top_valueize
)(tree
))
1067 bool valueized
= false;
1068 auto valueize_op
= [&](tree op
)
1070 return do_valueize (op
, top_valueize
, valueized
);
1072 auto valueize_condition
= [&](tree op
) -> tree
1074 bool cond_valueized
= false;
1075 tree lhs
= do_valueize (TREE_OPERAND (op
, 0), top_valueize
,
1077 tree rhs
= do_valueize (TREE_OPERAND (op
, 1), top_valueize
,
1079 gimple_match_op
res_op2 (res_op
->cond
, TREE_CODE (op
),
1080 TREE_TYPE (op
), lhs
, rhs
);
1081 if ((gimple_resimplify2 (seq
, &res_op2
, valueize
)
1083 && res_op2
.code
.is_tree_code ())
1085 auto code
= tree_code (res_op2
.code
);
1086 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1089 return build2 (code
, TREE_TYPE (op
),
1090 res_op2
.ops
[0], res_op2
.ops
[1]);
1092 else if (code
== SSA_NAME
1093 || code
== INTEGER_CST
1094 || code
== VECTOR_CST
)
1097 return res_op2
.ops
[0];
1100 return valueize_op (op
);
1103 if (!gimple_extract (stmt
, res_op
, valueize_op
, valueize_condition
))
1106 if (res_op
->code
.is_internal_fn ())
1108 internal_fn ifn
= internal_fn (res_op
->code
);
1109 if (try_conditional_simplification (ifn
, res_op
, seq
, valueize
))
1113 if (!res_op
->reverse
1115 && res_op
->resimplify (seq
, valueize
))
1121 /* Helper for the autogenerated code, valueize OP. */
1124 do_valueize (tree (*valueize
)(tree
), tree op
)
1126 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
1128 tree tem
= valueize (op
);
1135 /* Helper for the autogenerated code, get at the definition of NAME when
1136 VALUEIZE allows that. */
1139 get_def (tree (*valueize
)(tree
), tree name
)
1141 if (valueize
&& ! valueize (name
))
1143 return SSA_NAME_DEF_STMT (name
);
1146 /* Routine to determine if the types T1 and T2 are effectively
1147 the same for GIMPLE. If T1 or T2 is not a type, the test
1148 applies to their TREE_TYPE. */
1151 types_match (tree t1
, tree t2
)
1154 t1
= TREE_TYPE (t1
);
1156 t2
= TREE_TYPE (t2
);
1158 return types_compatible_p (t1
, t2
);
1161 /* Return if T has a single use. For GIMPLE, we also allow any
1162 non-SSA_NAME (ie constants) and zero uses to cope with uses
1163 that aren't linked up yet. */
1166 single_use (const_tree
) ATTRIBUTE_PURE
;
1169 single_use (const_tree t
)
1171 if (TREE_CODE (t
) != SSA_NAME
)
1174 /* Inline return has_zero_uses (t) || has_single_use (t); */
1175 const ssa_use_operand_t
*const head
= &(SSA_NAME_IMM_USE_NODE (t
));
1176 const ssa_use_operand_t
*ptr
;
1177 bool single
= false;
1179 for (ptr
= head
->next
; ptr
!= head
; ptr
= ptr
->next
)
1180 if (USE_STMT(ptr
) && !is_gimple_debug (USE_STMT (ptr
)))
1189 /* Return true if math operations should be canonicalized,
1190 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
1193 canonicalize_math_p ()
1195 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_opt_math
) == 0;
1198 /* Return true if math operations that are beneficial only after
1199 vectorization should be canonicalized. */
1202 canonicalize_math_after_vectorization_p ()
1204 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) != 0;
1207 /* Return true if we can still perform transformations that may introduce
1208 vector operations that are not supported by the target. Vector lowering
1209 normally handles those, but after that pass, it becomes unsafe. */
1212 optimize_vectors_before_lowering_p ()
1214 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) == 0;
1217 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
1218 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
1219 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
1220 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
1221 will likely be exact, while exp (log (arg0) * arg1) might be not.
1222 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
/* NOTE(review): this extraction appears to have dropped several original
   lines (braces, return statements and the switch over CODE); the comments
   below describe only what is visible here.  */
1225 optimize_pow_to_exp (tree arg0
, tree arg1
)
/* The caller only invokes this with a constant real base.  */
1227 gcc_assert (TREE_CODE (arg0
) == REAL_CST
);
/* If the base is not an exact integer the workaround is irrelevant.  */
1228 if (!real_isinteger (TREE_REAL_CST_PTR (arg0
), TYPE_MODE (TREE_TYPE (arg0
))))
/* Only an SSA exponent can be traced back to a PHI definition.  */
1231 if (TREE_CODE (arg1
) != SSA_NAME
)
/* Inspect the exponent's defining statement: either a PHI directly,
   or an assignment combining phi_res with a constant cst1.  */
1234 gimple
*def
= SSA_NAME_DEF_STMT (arg1
);
1235 gphi
*phi
= dyn_cast
<gphi
*> (def
);
1236 tree cst1
= NULL_TREE
;
1237 enum tree_code code
= ERROR_MARK
;
1240 if (!is_gimple_assign (def
))
1242 code
= gimple_assign_rhs_code (def
);
/* Expect the shape arg1 = SSA_NAME <code> REAL_CST; anything else is
   not the pattern described in the header comment.  */
1251 if (TREE_CODE (gimple_assign_rhs1 (def
)) != SSA_NAME
1252 || TREE_CODE (gimple_assign_rhs2 (def
)) != REAL_CST
)
1255 cst1
= gimple_assign_rhs2 (def
);
/* phi_res is the first operand; follow it to its PHI definition.  */
1257 phi
= dyn_cast
<gphi
*> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def
)));
/* Scan the PHI arguments for a single distinct real constant cst2.  */
1262 tree cst2
= NULL_TREE
;
1263 int n
= gimple_phi_num_args (phi
);
1264 for (int i
= 0; i
< n
; i
++)
1266 tree arg
= PHI_ARG_DEF (phi
, i
);
1267 if (TREE_CODE (arg
) != REAL_CST
)
1269 else if (cst2
== NULL_TREE
)
/* Two different constants in the PHI defeat the analysis.  */
1271 else if (!operand_equal_p (cst2
, arg
, 0))
/* Fold cst2 <code> cst1; if that result is an exact integer real,
   pow (arg0, arg1) is likely exact and should be kept as pow.  */
1276 cst2
= const_binop (code
, TREE_TYPE (cst2
), cst2
, cst1
);
1278 && TREE_CODE (cst2
) == REAL_CST
1279 && real_isinteger (TREE_REAL_CST_PTR (cst2
),
1280 TYPE_MODE (TREE_TYPE (cst2
))))
1285 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1286 is another division can be optimized. Don't optimize if INNER_DIV
1287 is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
/* NOTE(review): some original lines (braces and return statements)
   appear to be missing from this extraction.  */
1290 optimize_successive_divisions_p (tree divisor
, tree inner_div
)
/* Immediate-use information is only available in SSA form.  */
1292 if (!gimple_in_ssa_p (cfun
))
/* Walk every immediate use of the inner division, looking for a
   TRUNC_MOD_EXPR whose second operand equals DIVISOR.  */
1295 imm_use_iterator imm_iter
;
1296 use_operand_p use_p
;
1297 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, inner_div
)
1299 gimple
*use_stmt
= USE_STMT (use_p
);
/* Uses other than "x % DIVISOR" do not block the optimization.  */
1300 if (!is_gimple_assign (use_stmt
)
1301 || gimple_assign_rhs_code (use_stmt
) != TRUNC_MOD_EXPR
1302 || !operand_equal_p (gimple_assign_rhs2 (use_stmt
), divisor
, 0))
1309 /* Return a canonical form for CODE when operating on TYPE. The idea
1310 is to remove redundant ways of representing the same operation so
1311 that code_helpers can be hashed and compared for equality.
1313 The only current canonicalization is to replace built-in functions
1314 with internal functions, in cases where internal-fn.def defines
1315 such an internal function.
1317 Note that the new code_helper cannot necessarily be used in place of
1318 the original code_helper. For example, the new code_helper might be
1319 an internal function that the target does not support. */
1322 canonicalize_code (code_helper code
, tree type
)
1324 if (code
.is_fn_code ())
1325 return associated_internal_fn (combined_fn (code
), type
);
1329 /* Return true if CODE is a binary operation and if CODE is commutative when
1330 operating on type TYPE. */
1333 commutative_binary_op_p (code_helper code
, tree type
)
1335 if (code
.is_tree_code ())
1336 return commutative_tree_code (tree_code (code
));
1337 auto cfn
= combined_fn (code
);
1338 return commutative_binary_fn_p (associated_internal_fn (cfn
, type
));
1341 /* Return true if CODE represents a ternary operation and if the first two
1342 operands are commutative when CODE is operating on TYPE. */
1345 commutative_ternary_op_p (code_helper code
, tree type
)
1347 if (code
.is_tree_code ())
1348 return commutative_ternary_tree_code (tree_code (code
));
1349 auto cfn
= combined_fn (code
);
1350 return commutative_ternary_fn_p (associated_internal_fn (cfn
, type
));
1353 /* If CODE is commutative in two consecutive operands, return the
1354 index of the first, otherwise return -1. */
1357 first_commutative_argument (code_helper code
, tree type
)
1359 if (code
.is_tree_code ())
1361 auto tcode
= tree_code (code
);
1362 if (commutative_tree_code (tcode
)
1363 || commutative_ternary_tree_code (tcode
))
1367 auto cfn
= combined_fn (code
);
1368 return first_commutative_argument (associated_internal_fn (cfn
, type
));
1371 /* Return true if CODE is a binary operation that is associative when
1372 operating on type TYPE. */
1375 associative_binary_op_p (code_helper code
, tree type
)
1377 if (code
.is_tree_code ())
1378 return associative_tree_code (tree_code (code
));
1379 auto cfn
= combined_fn (code
);
1380 return associative_binary_fn_p (associated_internal_fn (cfn
, type
));
1383 /* Return true if the target directly supports operation CODE on type TYPE.
1384 QUERY_TYPE acts as for optab_for_tree_code. */
1387 directly_supported_p (code_helper code
, tree type
, optab_subtype query_type
)
1389 if (code
.is_tree_code ())
1391 direct_optab optab
= optab_for_tree_code (tree_code (code
), type
,
1393 return (optab
!= unknown_optab
1394 && optab_handler (optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
);
1396 gcc_assert (query_type
== optab_default
1397 || (query_type
== optab_vector
&& VECTOR_TYPE_P (type
))
1398 || (query_type
== optab_scalar
&& !VECTOR_TYPE_P (type
)));
1399 internal_fn ifn
= associated_internal_fn (combined_fn (code
), type
);
1400 return (direct_internal_fn_p (ifn
)
1401 && direct_internal_fn_supported_p (ifn
, type
, OPTIMIZE_FOR_SPEED
));
1404 /* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn
1405 for a code_helper CODE operating on type TYPE. */
1408 get_conditional_internal_fn (code_helper code
, tree type
)
1410 if (code
.is_tree_code ())
1411 return get_conditional_internal_fn (tree_code (code
));
1412 auto cfn
= combined_fn (code
);
1413 return get_conditional_internal_fn (associated_internal_fn (cfn
, type
));