1 /* Preamble and helpers for the autogenerated gimple-match.c file.
2 Copyright (C) 2014-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-fold.h"
38 #include "gimple-match.h"
39 #include "tree-pass.h"
40 #include "internal-fn.h"
41 #include "case-cfn-macros.h"
43 #include "optabs-tree.h"
47 #include "gimple-range.h"
49 /* Forward declarations of the private auto-generated matchers.
50 They expect valueized operands in canonical order and do not
51 perform simplification of all-constant operands. */
52 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
53 code_helper
, tree
, tree
);
54 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
55 code_helper
, tree
, tree
, tree
);
56 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
57 code_helper
, tree
, tree
, tree
, tree
);
58 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
59 code_helper
, tree
, tree
, tree
, tree
, tree
);
60 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
61 code_helper
, tree
, tree
, tree
, tree
, tree
, tree
);
62 static bool gimple_resimplify1 (gimple_seq
*, gimple_match_op
*,
64 static bool gimple_resimplify2 (gimple_seq
*, gimple_match_op
*,
66 static bool gimple_resimplify3 (gimple_seq
*, gimple_match_op
*,
68 static bool gimple_resimplify4 (gimple_seq
*, gimple_match_op
*,
70 static bool gimple_resimplify5 (gimple_seq
*, gimple_match_op
*,
73 const unsigned int gimple_match_op::MAX_NUM_OPS
;
75 /* Return whether T is a constant that we'll dispatch to fold to
76 evaluate fully constant expressions. */
79 constant_for_folding (tree t
)
81 return (CONSTANT_CLASS_P (t
)
82 /* The following is only interesting to string builtins. */
83 || (TREE_CODE (t
) == ADDR_EXPR
84 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
));
87 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
88 operation. Return true on success, storing the new operation in NEW_OP. */
91 convert_conditional_op (gimple_match_op
*orig_op
,
92 gimple_match_op
*new_op
)
95 if (orig_op
->code
.is_tree_code ())
96 ifn
= get_conditional_internal_fn ((tree_code
) orig_op
->code
);
99 combined_fn cfn
= orig_op
->code
;
100 if (!internal_fn_p (cfn
))
102 ifn
= get_conditional_internal_fn (as_internal_fn (cfn
));
106 unsigned int num_ops
= orig_op
->num_ops
;
107 new_op
->set_op (as_combined_fn (ifn
), orig_op
->type
, num_ops
+ 2);
108 new_op
->ops
[0] = orig_op
->cond
.cond
;
109 for (unsigned int i
= 0; i
< num_ops
; ++i
)
110 new_op
->ops
[i
+ 1] = orig_op
->ops
[i
];
111 tree else_value
= orig_op
->cond
.else_value
;
113 else_value
= targetm
.preferred_else_value (ifn
, orig_op
->type
,
114 num_ops
, orig_op
->ops
);
115 new_op
->ops
[num_ops
+ 1] = else_value
;
119 /* RES_OP is the result of a simplification. If it is conditional,
120 try to replace it with the equivalent UNCOND form, such as an
121 IFN_COND_* call or a VEC_COND_EXPR. Also try to resimplify the
122 result of the replacement if appropriate, adding any new statements to
123 SEQ and using VALUEIZE as the valueization function. Return true if
124 this resimplification occurred and resulted in at least one change. */
127 maybe_resimplify_conditional_op (gimple_seq
*seq
, gimple_match_op
*res_op
,
128 tree (*valueize
) (tree
))
130 if (!res_op
->cond
.cond
)
133 if (!res_op
->cond
.else_value
134 && res_op
->code
.is_tree_code ())
136 /* The "else" value doesn't matter. If the "then" value is a
137 gimple value, just use it unconditionally. This isn't a
138 simplification in itself, since there was no operation to
139 build in the first place. */
140 if (gimple_simplified_result_is_gimple_val (res_op
))
142 res_op
->cond
.cond
= NULL_TREE
;
146 /* Likewise if the operation would not trap. */
147 bool honor_trapv
= (INTEGRAL_TYPE_P (res_op
->type
)
148 && TYPE_OVERFLOW_TRAPS (res_op
->type
));
149 tree_code op_code
= (tree_code
) res_op
->code
;
152 /* COND_EXPR will trap if, and only if, the condition
153 traps and hence we have to check this. For all other operations, we
154 don't need to consider the operands. */
155 if (op_code
== COND_EXPR
)
156 op_could_trap
= generic_expr_could_trap_p (res_op
->ops
[0]);
158 op_could_trap
= operation_could_trap_p ((tree_code
) res_op
->code
,
159 FLOAT_TYPE_P (res_op
->type
),
161 res_op
->op_or_null (1));
165 res_op
->cond
.cond
= NULL_TREE
;
170 /* If the "then" value is a gimple value and the "else" value matters,
171 create a VEC_COND_EXPR between them, then see if it can be further
173 gimple_match_op new_op
;
174 if (res_op
->cond
.else_value
175 && VECTOR_TYPE_P (res_op
->type
)
176 && gimple_simplified_result_is_gimple_val (res_op
))
178 new_op
.set_op (VEC_COND_EXPR
, res_op
->type
,
179 res_op
->cond
.cond
, res_op
->ops
[0],
180 res_op
->cond
.else_value
);
182 return gimple_resimplify3 (seq
, res_op
, valueize
);
185 /* Otherwise try rewriting the operation as an IFN_COND_* call.
186 Again, this isn't a simplification in itself, since it's what
187 RES_OP already described. */
188 if (convert_conditional_op (res_op
, &new_op
))
194 /* Helper that matches and simplifies the toplevel result from
195 a gimple_simplify run (where we don't want to build
196 a stmt in case it's used in in-place folding). Replaces
197 RES_OP with a simplified and/or canonicalized result and
198 returns whether any change was made. */
201 gimple_resimplify1 (gimple_seq
*seq
, gimple_match_op
*res_op
,
202 tree (*valueize
)(tree
))
204 if (constant_for_folding (res_op
->ops
[0]))
206 tree tem
= NULL_TREE
;
207 if (res_op
->code
.is_tree_code ())
209 tree_code code
= res_op
->code
;
210 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
211 && TREE_CODE_LENGTH (code
) == 1)
212 tem
= const_unop (res_op
->code
, res_op
->type
, res_op
->ops
[0]);
215 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
218 && CONSTANT_CLASS_P (tem
))
220 if (TREE_OVERFLOW_P (tem
))
221 tem
= drop_tree_overflow (tem
);
222 res_op
->set_value (tem
);
223 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
228 /* Limit recursion, there are cases like PR80887 and others, for
229 example when value-numbering presents us with unfolded expressions
230 that we are really not prepared to handle without eventual
231 oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
232 itself as available expression. */
233 static unsigned depth
;
236 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
237 fprintf (dump_file
, "Aborting expression simplification due to "
243 gimple_match_op
res_op2 (*res_op
);
244 if (gimple_simplify (&res_op2
, seq
, valueize
,
245 res_op
->code
, res_op
->type
, res_op
->ops
[0]))
253 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
259 /* Helper that matches and simplifies the toplevel result from
260 a gimple_simplify run (where we don't want to build
261 a stmt in case it's used in in-place folding). Replaces
262 RES_OP with a simplified and/or canonicalized result and
263 returns whether any change was made. */
266 gimple_resimplify2 (gimple_seq
*seq
, gimple_match_op
*res_op
,
267 tree (*valueize
)(tree
))
269 if (constant_for_folding (res_op
->ops
[0])
270 && constant_for_folding (res_op
->ops
[1]))
272 tree tem
= NULL_TREE
;
273 if (res_op
->code
.is_tree_code ())
275 tree_code code
= res_op
->code
;
276 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
277 && TREE_CODE_LENGTH (code
) == 2)
278 tem
= const_binop (res_op
->code
, res_op
->type
,
279 res_op
->ops
[0], res_op
->ops
[1]);
282 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
283 res_op
->ops
[0], res_op
->ops
[1]);
285 && CONSTANT_CLASS_P (tem
))
287 if (TREE_OVERFLOW_P (tem
))
288 tem
= drop_tree_overflow (tem
);
289 res_op
->set_value (tem
);
290 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
295 /* Canonicalize operand order. */
296 bool canonicalized
= false;
297 if (res_op
->code
.is_tree_code ()
298 && (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
299 || commutative_tree_code (res_op
->code
))
300 && tree_swap_operands_p (res_op
->ops
[0], res_op
->ops
[1]))
302 std::swap (res_op
->ops
[0], res_op
->ops
[1]);
303 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
)
304 res_op
->code
= swap_tree_comparison (res_op
->code
);
305 canonicalized
= true;
308 /* Limit recursion, see gimple_resimplify1. */
309 static unsigned depth
;
312 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
313 fprintf (dump_file
, "Aborting expression simplification due to "
319 gimple_match_op
res_op2 (*res_op
);
320 if (gimple_simplify (&res_op2
, seq
, valueize
,
321 res_op
->code
, res_op
->type
,
322 res_op
->ops
[0], res_op
->ops
[1]))
330 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
333 return canonicalized
;
336 /* Helper that matches and simplifies the toplevel result from
337 a gimple_simplify run (where we don't want to build
338 a stmt in case it's used in in-place folding). Replaces
339 RES_OP with a simplified and/or canonicalized result and
340 returns whether any change was made. */
343 gimple_resimplify3 (gimple_seq
*seq
, gimple_match_op
*res_op
,
344 tree (*valueize
)(tree
))
346 if (constant_for_folding (res_op
->ops
[0])
347 && constant_for_folding (res_op
->ops
[1])
348 && constant_for_folding (res_op
->ops
[2]))
350 tree tem
= NULL_TREE
;
351 if (res_op
->code
.is_tree_code ())
353 tree_code code
= res_op
->code
;
354 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
355 && TREE_CODE_LENGTH (code
) == 3)
356 tem
= fold_ternary
/*_to_constant*/ (res_op
->code
, res_op
->type
,
357 res_op
->ops
[0], res_op
->ops
[1],
361 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
362 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]);
364 && CONSTANT_CLASS_P (tem
))
366 if (TREE_OVERFLOW_P (tem
))
367 tem
= drop_tree_overflow (tem
);
368 res_op
->set_value (tem
);
369 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
374 /* Canonicalize operand order. */
375 bool canonicalized
= false;
376 if (res_op
->code
.is_tree_code ()
377 && commutative_ternary_tree_code (res_op
->code
)
378 && tree_swap_operands_p (res_op
->ops
[0], res_op
->ops
[1]))
380 std::swap (res_op
->ops
[0], res_op
->ops
[1]);
381 canonicalized
= true;
384 /* Limit recursion, see gimple_resimplify1. */
385 static unsigned depth
;
388 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
389 fprintf (dump_file
, "Aborting expression simplification due to "
395 gimple_match_op
res_op2 (*res_op
);
396 if (gimple_simplify (&res_op2
, seq
, valueize
,
397 res_op
->code
, res_op
->type
,
398 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]))
406 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
409 return canonicalized
;
412 /* Helper that matches and simplifies the toplevel result from
413 a gimple_simplify run (where we don't want to build
414 a stmt in case it's used in in-place folding). Replaces
415 RES_OP with a simplified and/or canonicalized result and
416 returns whether any change was made. */
419 gimple_resimplify4 (gimple_seq
*seq
, gimple_match_op
*res_op
,
420 tree (*valueize
)(tree
))
422 /* No constant folding is defined for four-operand functions. */
424 /* Limit recursion, see gimple_resimplify1. */
425 static unsigned depth
;
428 if (dump_file
&& (dump_flags
& TDF_FOLDING
))
429 fprintf (dump_file
, "Aborting expression simplification due to "
435 gimple_match_op
res_op2 (*res_op
);
436 if (gimple_simplify (&res_op2
, seq
, valueize
,
437 res_op
->code
, res_op
->type
,
438 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2],
447 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
453 /* Helper that matches and simplifies the toplevel result from
454 a gimple_simplify run (where we don't want to build
455 a stmt in case it's used in in-place folding). Replaces
456 RES_OP with a simplified and/or canonicalized result and
457 returns whether any change was made. */
460 gimple_resimplify5 (gimple_seq
*seq
, gimple_match_op
*res_op
,
461 tree (*valueize
)(tree
))
463 /* No constant folding is defined for five-operand functions. */
465 gimple_match_op
res_op2 (*res_op
);
466 if (gimple_simplify (&res_op2
, seq
, valueize
,
467 res_op
->code
, res_op
->type
,
468 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2],
469 res_op
->ops
[3], res_op
->ops
[4]))
475 if (maybe_resimplify_conditional_op (seq
, res_op
, valueize
))
481 /* Match and simplify the toplevel valueized operation THIS.
482 Replaces THIS with a simplified and/or canonicalized result and
483 returns whether any change was made. */
486 gimple_match_op::resimplify (gimple_seq
*seq
, tree (*valueize
)(tree
))
491 return gimple_resimplify1 (seq
, this, valueize
);
493 return gimple_resimplify2 (seq
, this, valueize
);
495 return gimple_resimplify3 (seq
, this, valueize
);
497 return gimple_resimplify4 (seq
, this, valueize
);
499 return gimple_resimplify5 (seq
, this, valueize
);
505 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
506 build a GENERIC tree for that expression and update RES_OP accordingly. */
509 maybe_build_generic_op (gimple_match_op
*res_op
)
511 tree_code code
= (tree_code
) res_op
->code
;
517 case VIEW_CONVERT_EXPR
:
518 val
= build1 (code
, res_op
->type
, res_op
->ops
[0]);
519 res_op
->set_value (val
);
522 val
= build3 (code
, res_op
->type
, res_op
->ops
[0], res_op
->ops
[1],
524 REF_REVERSE_STORAGE_ORDER (val
) = res_op
->reverse
;
525 res_op
->set_value (val
);
531 tree (*mprts_hook
) (gimple_match_op
*);
533 /* Try to build RES_OP, which is known to be a call to FN. Return null
534 if the target doesn't support the function. */
537 build_call_internal (internal_fn fn
, gimple_match_op
*res_op
)
539 if (direct_internal_fn_p (fn
))
541 tree_pair types
= direct_internal_fn_types (fn
, res_op
->type
,
543 if (!direct_internal_fn_supported_p (fn
, types
, OPTIMIZE_FOR_BOTH
))
546 return gimple_build_call_internal (fn
, res_op
->num_ops
,
547 res_op
->op_or_null (0),
548 res_op
->op_or_null (1),
549 res_op
->op_or_null (2),
550 res_op
->op_or_null (3),
551 res_op
->op_or_null (4));
554 /* Push the exploded expression described by RES_OP as a statement to
555 SEQ if necessary and return a gimple value denoting the value of the
556 expression. If RES is not NULL then the result will be always RES
557 and even gimple values are pushed to SEQ. */
560 maybe_push_res_to_seq (gimple_match_op
*res_op
, gimple_seq
*seq
, tree res
)
562 tree
*ops
= res_op
->ops
;
563 unsigned num_ops
= res_op
->num_ops
;
565 /* The caller should have converted conditional operations into an UNCOND
566 form and resimplified as appropriate. The conditional form only
567 survives this far if that conversion failed. */
568 if (res_op
->cond
.cond
)
571 if (res_op
->code
.is_tree_code ())
574 && gimple_simplified_result_is_gimple_val (res_op
))
578 tree tem
= mprts_hook (res_op
);
587 /* Play safe and do not allow abnormals to be mentioned in
588 newly created statements. */
589 for (unsigned int i
= 0; i
< num_ops
; ++i
)
590 if (TREE_CODE (ops
[i
]) == SSA_NAME
591 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
]))
594 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
595 for (unsigned int i
= 0; i
< 2; ++i
)
596 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
597 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
)))
600 if (res_op
->code
.is_tree_code ())
604 if (gimple_in_ssa_p (cfun
))
605 res
= make_ssa_name (res_op
->type
);
607 res
= create_tmp_reg (res_op
->type
);
609 maybe_build_generic_op (res_op
);
610 gimple
*new_stmt
= gimple_build_assign (res
, res_op
->code
,
611 res_op
->op_or_null (0),
612 res_op
->op_or_null (1),
613 res_op
->op_or_null (2));
614 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
619 gcc_assert (num_ops
!= 0);
620 combined_fn fn
= res_op
->code
;
621 gcall
*new_stmt
= NULL
;
622 if (internal_fn_p (fn
))
624 /* Generate the given function if we can. */
625 internal_fn ifn
= as_internal_fn (fn
);
626 new_stmt
= build_call_internal (ifn
, res_op
);
632 /* Find the function we want to call. */
633 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
637 /* We can't and should not emit calls to non-const functions. */
638 if (!(flags_from_decl_or_type (decl
) & ECF_CONST
))
641 new_stmt
= gimple_build_call (decl
, num_ops
,
642 res_op
->op_or_null (0),
643 res_op
->op_or_null (1),
644 res_op
->op_or_null (2),
645 res_op
->op_or_null (3),
646 res_op
->op_or_null (4));
650 if (gimple_in_ssa_p (cfun
))
651 res
= make_ssa_name (res_op
->type
);
653 res
= create_tmp_reg (res_op
->type
);
655 gimple_call_set_lhs (new_stmt
, res
);
656 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
662 /* Public API overloads follow for operation being tree_code or
663 built_in_function and for one to three operands or arguments.
664 They return NULL_TREE if nothing could be simplified or
665 the resulting simplified value with parts pushed to SEQ.
666 If SEQ is NULL then if the simplification needs to create
667 new stmts it will fail. If VALUEIZE is non-NULL then all
668 SSA names will be valueized using that hook prior to
669 applying simplifications. */
674 gimple_simplify (enum tree_code code
, tree type
,
676 gimple_seq
*seq
, tree (*valueize
)(tree
))
678 if (constant_for_folding (op0
))
680 tree res
= const_unop (code
, type
, op0
);
682 && CONSTANT_CLASS_P (res
))
686 gimple_match_op res_op
;
687 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
))
689 return maybe_push_res_to_seq (&res_op
, seq
);
695 gimple_simplify (enum tree_code code
, tree type
,
697 gimple_seq
*seq
, tree (*valueize
)(tree
))
699 if (constant_for_folding (op0
) && constant_for_folding (op1
))
701 tree res
= const_binop (code
, type
, op0
, op1
);
703 && CONSTANT_CLASS_P (res
))
707 /* Canonicalize operand order both for matching and fallback stmt
709 if ((commutative_tree_code (code
)
710 || TREE_CODE_CLASS (code
) == tcc_comparison
)
711 && tree_swap_operands_p (op0
, op1
))
713 std::swap (op0
, op1
);
714 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
715 code
= swap_tree_comparison (code
);
718 gimple_match_op res_op
;
719 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
))
721 return maybe_push_res_to_seq (&res_op
, seq
);
727 gimple_simplify (enum tree_code code
, tree type
,
728 tree op0
, tree op1
, tree op2
,
729 gimple_seq
*seq
, tree (*valueize
)(tree
))
731 if (constant_for_folding (op0
) && constant_for_folding (op1
)
732 && constant_for_folding (op2
))
734 tree res
= fold_ternary
/*_to_constant */ (code
, type
, op0
, op1
, op2
);
736 && CONSTANT_CLASS_P (res
))
740 /* Canonicalize operand order both for matching and fallback stmt
742 if (commutative_ternary_tree_code (code
)
743 && tree_swap_operands_p (op0
, op1
))
744 std::swap (op0
, op1
);
746 gimple_match_op res_op
;
747 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
, op2
))
749 return maybe_push_res_to_seq (&res_op
, seq
);
752 /* Builtin or internal function with one argument. */
755 gimple_simplify (combined_fn fn
, tree type
,
757 gimple_seq
*seq
, tree (*valueize
)(tree
))
759 if (constant_for_folding (arg0
))
761 tree res
= fold_const_call (fn
, type
, arg0
);
762 if (res
&& CONSTANT_CLASS_P (res
))
766 gimple_match_op res_op
;
767 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
))
769 return maybe_push_res_to_seq (&res_op
, seq
);
772 /* Builtin or internal function with two arguments. */
775 gimple_simplify (combined_fn fn
, tree type
,
776 tree arg0
, tree arg1
,
777 gimple_seq
*seq
, tree (*valueize
)(tree
))
779 if (constant_for_folding (arg0
)
780 && constant_for_folding (arg1
))
782 tree res
= fold_const_call (fn
, type
, arg0
, arg1
);
783 if (res
&& CONSTANT_CLASS_P (res
))
787 gimple_match_op res_op
;
788 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
))
790 return maybe_push_res_to_seq (&res_op
, seq
);
793 /* Builtin or internal function with three arguments. */
796 gimple_simplify (combined_fn fn
, tree type
,
797 tree arg0
, tree arg1
, tree arg2
,
798 gimple_seq
*seq
, tree (*valueize
)(tree
))
800 if (constant_for_folding (arg0
)
801 && constant_for_folding (arg1
)
802 && constant_for_folding (arg2
))
804 tree res
= fold_const_call (fn
, type
, arg0
, arg1
, arg2
);
805 if (res
&& CONSTANT_CLASS_P (res
))
809 gimple_match_op res_op
;
810 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
, arg2
))
812 return maybe_push_res_to_seq (&res_op
, seq
);
815 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
816 VALUEIZED to true if valueization changed OP. */
819 do_valueize (tree op
, tree (*valueize
)(tree
), bool &valueized
)
821 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
823 tree tem
= valueize (op
);
824 if (tem
&& tem
!= op
)
833 /* If RES_OP is a call to a conditional internal function, try simplifying
834 the associated unconditional operation and using the result to build
835 a new conditional operation. For example, if RES_OP is:
837 IFN_COND_ADD (COND, A, B, ELSE)
839 try simplifying (plus A B) and using the result to build a replacement
840 for the whole IFN_COND_ADD.
842 Return true if this approach led to a simplification, otherwise leave
843 RES_OP unchanged (and so suitable for other simplifications). When
844 returning true, add any new statements to SEQ and use VALUEIZE as the
845 valueization function.
847 RES_OP is known to be a call to IFN. */
850 try_conditional_simplification (internal_fn ifn
, gimple_match_op
*res_op
,
851 gimple_seq
*seq
, tree (*valueize
) (tree
))
854 tree_code code
= conditional_internal_fn_code (ifn
);
855 if (code
!= ERROR_MARK
)
859 ifn
= get_unconditional_internal_fn (ifn
);
862 op
= as_combined_fn (ifn
);
865 unsigned int num_ops
= res_op
->num_ops
;
866 gimple_match_op
cond_op (gimple_match_cond (res_op
->ops
[0],
867 res_op
->ops
[num_ops
- 1]),
868 op
, res_op
->type
, num_ops
- 2);
870 memcpy (cond_op
.ops
, res_op
->ops
+ 1, (num_ops
- 1) * sizeof *cond_op
.ops
);
874 if (!gimple_resimplify2 (seq
, &cond_op
, valueize
))
878 if (!gimple_resimplify3 (seq
, &cond_op
, valueize
))
885 maybe_resimplify_conditional_op (seq
, res_op
, valueize
);
889 /* The main STMT based simplification entry. It is used by the fold_stmt
890 and the fold_stmt_to_constant APIs. */
893 gimple_simplify (gimple
*stmt
, gimple_match_op
*res_op
, gimple_seq
*seq
,
894 tree (*valueize
)(tree
), tree (*top_valueize
)(tree
))
896 switch (gimple_code (stmt
))
900 enum tree_code code
= gimple_assign_rhs_code (stmt
);
901 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
902 switch (gimple_assign_rhs_class (stmt
))
904 case GIMPLE_SINGLE_RHS
:
905 if (code
== REALPART_EXPR
906 || code
== IMAGPART_EXPR
907 || code
== VIEW_CONVERT_EXPR
)
909 tree op0
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
910 bool valueized
= false;
911 op0
= do_valueize (op0
, top_valueize
, valueized
);
912 res_op
->set_op (code
, type
, op0
);
913 return (gimple_resimplify1 (seq
, res_op
, valueize
)
916 else if (code
== BIT_FIELD_REF
)
918 tree rhs1
= gimple_assign_rhs1 (stmt
);
919 tree op0
= TREE_OPERAND (rhs1
, 0);
920 bool valueized
= false;
921 op0
= do_valueize (op0
, top_valueize
, valueized
);
922 res_op
->set_op (code
, type
, op0
,
923 TREE_OPERAND (rhs1
, 1),
924 TREE_OPERAND (rhs1
, 2),
925 REF_REVERSE_STORAGE_ORDER (rhs1
));
928 return (gimple_resimplify3 (seq
, res_op
, valueize
)
931 else if (code
== SSA_NAME
934 tree op0
= gimple_assign_rhs1 (stmt
);
935 tree valueized
= top_valueize (op0
);
936 if (!valueized
|| op0
== valueized
)
938 res_op
->set_op (TREE_CODE (op0
), type
, valueized
);
942 case GIMPLE_UNARY_RHS
:
944 tree rhs1
= gimple_assign_rhs1 (stmt
);
945 bool valueized
= false;
946 rhs1
= do_valueize (rhs1
, top_valueize
, valueized
);
947 res_op
->set_op (code
, type
, rhs1
);
948 return (gimple_resimplify1 (seq
, res_op
, valueize
)
951 case GIMPLE_BINARY_RHS
:
953 tree rhs1
= gimple_assign_rhs1 (stmt
);
954 tree rhs2
= gimple_assign_rhs2 (stmt
);
955 bool valueized
= false;
956 rhs1
= do_valueize (rhs1
, top_valueize
, valueized
);
957 rhs2
= do_valueize (rhs2
, top_valueize
, valueized
);
958 res_op
->set_op (code
, type
, rhs1
, rhs2
);
959 return (gimple_resimplify2 (seq
, res_op
, valueize
)
962 case GIMPLE_TERNARY_RHS
:
964 bool valueized
= false;
965 tree rhs1
= gimple_assign_rhs1 (stmt
);
966 /* If this is a COND_EXPR first try to simplify an
967 embedded GENERIC condition. */
968 if (code
== COND_EXPR
)
970 if (COMPARISON_CLASS_P (rhs1
))
972 tree lhs
= TREE_OPERAND (rhs1
, 0);
973 tree rhs
= TREE_OPERAND (rhs1
, 1);
974 lhs
= do_valueize (lhs
, top_valueize
, valueized
);
975 rhs
= do_valueize (rhs
, top_valueize
, valueized
);
976 gimple_match_op
res_op2 (res_op
->cond
, TREE_CODE (rhs1
),
977 TREE_TYPE (rhs1
), lhs
, rhs
);
978 if ((gimple_resimplify2 (seq
, &res_op2
, valueize
)
980 && res_op2
.code
.is_tree_code ())
983 if (TREE_CODE_CLASS ((enum tree_code
) res_op2
.code
)
985 rhs1
= build2 (res_op2
.code
, TREE_TYPE (rhs1
),
986 res_op2
.ops
[0], res_op2
.ops
[1]);
987 else if (res_op2
.code
== SSA_NAME
988 || res_op2
.code
== INTEGER_CST
989 || res_op2
.code
== VECTOR_CST
)
990 rhs1
= res_op2
.ops
[0];
996 tree rhs2
= gimple_assign_rhs2 (stmt
);
997 tree rhs3
= gimple_assign_rhs3 (stmt
);
998 rhs1
= do_valueize (rhs1
, top_valueize
, valueized
);
999 rhs2
= do_valueize (rhs2
, top_valueize
, valueized
);
1000 rhs3
= do_valueize (rhs3
, top_valueize
, valueized
);
1001 res_op
->set_op (code
, type
, rhs1
, rhs2
, rhs3
);
1002 return (gimple_resimplify3 (seq
, res_op
, valueize
)
1012 /* ??? This way we can't simplify calls with side-effects. */
1013 if (gimple_call_lhs (stmt
) != NULL_TREE
1014 && gimple_call_num_args (stmt
) >= 1
1015 && gimple_call_num_args (stmt
) <= 5)
1017 bool valueized
= false;
1019 if (gimple_call_internal_p (stmt
))
1020 cfn
= as_combined_fn (gimple_call_internal_fn (stmt
));
1023 tree fn
= gimple_call_fn (stmt
);
1027 fn
= do_valueize (fn
, top_valueize
, valueized
);
1028 if (TREE_CODE (fn
) != ADDR_EXPR
1029 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
1032 tree decl
= TREE_OPERAND (fn
, 0);
1033 if (DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_NORMAL
1034 || !gimple_builtin_call_types_compatible_p (stmt
, decl
))
1037 cfn
= as_combined_fn (DECL_FUNCTION_CODE (decl
));
1040 unsigned int num_args
= gimple_call_num_args (stmt
);
1041 res_op
->set_op (cfn
, TREE_TYPE (gimple_call_lhs (stmt
)), num_args
);
1042 for (unsigned i
= 0; i
< num_args
; ++i
)
1044 tree arg
= gimple_call_arg (stmt
, i
);
1045 res_op
->ops
[i
] = do_valueize (arg
, top_valueize
, valueized
);
1047 if (internal_fn_p (cfn
)
1048 && try_conditional_simplification (as_internal_fn (cfn
),
1049 res_op
, seq
, valueize
))
1054 return (gimple_resimplify1 (seq
, res_op
, valueize
)
1057 return (gimple_resimplify2 (seq
, res_op
, valueize
)
1060 return (gimple_resimplify3 (seq
, res_op
, valueize
)
1063 return (gimple_resimplify4 (seq
, res_op
, valueize
)
1066 return (gimple_resimplify5 (seq
, res_op
, valueize
)
1076 tree lhs
= gimple_cond_lhs (stmt
);
1077 tree rhs
= gimple_cond_rhs (stmt
);
1078 bool valueized
= false;
1079 lhs
= do_valueize (lhs
, top_valueize
, valueized
);
1080 rhs
= do_valueize (rhs
, top_valueize
, valueized
);
1081 res_op
->set_op (gimple_cond_code (stmt
), boolean_type_node
, lhs
, rhs
);
1082 return (gimple_resimplify2 (seq
, res_op
, valueize
)
1094 /* Helper for the autogenerated code, valueize OP. */
1097 do_valueize (tree (*valueize
)(tree
), tree op
)
1099 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
1101 tree tem
= valueize (op
);
1108 /* Helper for the autogenerated code, get at the definition of NAME when
1109 VALUEIZE allows that. */
1112 get_def (tree (*valueize
)(tree
), tree name
)
1114 if (valueize
&& ! valueize (name
))
1116 return SSA_NAME_DEF_STMT (name
);
1119 /* Routine to determine if the types T1 and T2 are effectively
1120 the same for GIMPLE. If T1 or T2 is not a type, the test
1121 applies to their TREE_TYPE. */
1124 types_match (tree t1
, tree t2
)
1127 t1
= TREE_TYPE (t1
);
1129 t2
= TREE_TYPE (t2
);
1131 return types_compatible_p (t1
, t2
);
1134 /* Return if T has a single use. For GIMPLE, we also allow any
1135 non-SSA_NAME (ie constants) and zero uses to cope with uses
1136 that aren't linked up yet. */
1141 return TREE_CODE (t
) != SSA_NAME
|| has_zero_uses (t
) || has_single_use (t
);
1144 /* Return true if math operations should be canonicalized,
1145 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
1148 canonicalize_math_p ()
1150 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_opt_math
) == 0;
1153 /* Return true if math operations that are beneficial only after
1154 vectorization should be canonicalized. */
1157 canonicalize_math_after_vectorization_p ()
1159 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) != 0;
1162 /* Return true if we can still perform transformations that may introduce
1163 vector operations that are not supported by the target. Vector lowering
1164 normally handles those, but after that pass, it becomes unsafe. */
1167 optimize_vectors_before_lowering_p ()
1169 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) == 0;
1172 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
1173 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
1174 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
1175 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
1176 will likely be exact, while exp (log (arg0) * arg1) might be not.
1177 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
1180 optimize_pow_to_exp (tree arg0
, tree arg1
)
1182 gcc_assert (TREE_CODE (arg0
) == REAL_CST
);
1183 if (!real_isinteger (TREE_REAL_CST_PTR (arg0
), TYPE_MODE (TREE_TYPE (arg0
))))
1186 if (TREE_CODE (arg1
) != SSA_NAME
)
1189 gimple
*def
= SSA_NAME_DEF_STMT (arg1
);
1190 gphi
*phi
= dyn_cast
<gphi
*> (def
);
1191 tree cst1
= NULL_TREE
;
1192 enum tree_code code
= ERROR_MARK
;
1195 if (!is_gimple_assign (def
))
1197 code
= gimple_assign_rhs_code (def
);
1206 if (TREE_CODE (gimple_assign_rhs1 (def
)) != SSA_NAME
1207 || TREE_CODE (gimple_assign_rhs2 (def
)) != REAL_CST
)
1210 cst1
= gimple_assign_rhs2 (def
);
1212 phi
= dyn_cast
<gphi
*> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def
)));
1217 tree cst2
= NULL_TREE
;
1218 int n
= gimple_phi_num_args (phi
);
1219 for (int i
= 0; i
< n
; i
++)
1221 tree arg
= PHI_ARG_DEF (phi
, i
);
1222 if (TREE_CODE (arg
) != REAL_CST
)
1224 else if (cst2
== NULL_TREE
)
1226 else if (!operand_equal_p (cst2
, arg
, 0))
1231 cst2
= const_binop (code
, TREE_TYPE (cst2
), cst2
, cst1
);
1233 && TREE_CODE (cst2
) == REAL_CST
1234 && real_isinteger (TREE_REAL_CST_PTR (cst2
),
1235 TYPE_MODE (TREE_TYPE (cst2
))))
1240 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1241 is another division can be optimized. Don't optimize if INNER_DIV
1242 is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
1245 optimize_successive_divisions_p (tree divisor
, tree inner_div
)
1247 if (!gimple_in_ssa_p (cfun
))
1250 imm_use_iterator imm_iter
;
1251 use_operand_p use_p
;
1252 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, inner_div
)
1254 gimple
*use_stmt
= USE_STMT (use_p
);
1255 if (!is_gimple_assign (use_stmt
)
1256 || gimple_assign_rhs_code (use_stmt
) != TRUNC_MOD_EXPR
1257 || !operand_equal_p (gimple_assign_rhs2 (use_stmt
), divisor
, 0))