/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"
#include "tree-eh.h"
45 /* Forward declarations of the private auto-generated matchers.
46 They expect valueized operands in canonical order and do not
47 perform simplification of all-constant operands. */
48 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
49 code_helper
, tree
, tree
);
50 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
51 code_helper
, tree
, tree
, tree
);
52 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
53 code_helper
, tree
, tree
, tree
, tree
);
54 static bool gimple_simplify (gimple_match_op
*, gimple_seq
*, tree (*)(tree
),
55 code_helper
, tree
, tree
, tree
, tree
, tree
);
57 const unsigned int gimple_match_op::MAX_NUM_OPS
;
59 /* Return whether T is a constant that we'll dispatch to fold to
60 evaluate fully constant expressions. */
63 constant_for_folding (tree t
)
65 return (CONSTANT_CLASS_P (t
)
66 /* The following is only interesting to string builtins. */
67 || (TREE_CODE (t
) == ADDR_EXPR
68 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
));
72 /* Helper that matches and simplifies the toplevel result from
73 a gimple_simplify run (where we don't want to build
74 a stmt in case it's used in in-place folding). Replaces
75 RES_OP with a simplified and/or canonicalized result and
76 returns whether any change was made. */
79 gimple_resimplify1 (gimple_seq
*seq
, gimple_match_op
*res_op
,
80 tree (*valueize
)(tree
))
82 if (constant_for_folding (res_op
->ops
[0]))
85 if (res_op
->code
.is_tree_code ())
86 tem
= const_unop (res_op
->code
, res_op
->type
, res_op
->ops
[0]);
88 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
91 && CONSTANT_CLASS_P (tem
))
93 if (TREE_OVERFLOW_P (tem
))
94 tem
= drop_tree_overflow (tem
);
95 res_op
->set_value (tem
);
100 gimple_match_op
res_op2 (*res_op
);
101 if (gimple_simplify (&res_op2
, seq
, valueize
,
102 res_op
->code
, res_op
->type
, res_op
->ops
[0]))
111 /* Helper that matches and simplifies the toplevel result from
112 a gimple_simplify run (where we don't want to build
113 a stmt in case it's used in in-place folding). Replaces
114 RES_OP with a simplified and/or canonicalized result and
115 returns whether any change was made. */
118 gimple_resimplify2 (gimple_seq
*seq
, gimple_match_op
*res_op
,
119 tree (*valueize
)(tree
))
121 if (constant_for_folding (res_op
->ops
[0])
122 && constant_for_folding (res_op
->ops
[1]))
124 tree tem
= NULL_TREE
;
125 if (res_op
->code
.is_tree_code ())
126 tem
= const_binop (res_op
->code
, res_op
->type
,
127 res_op
->ops
[0], res_op
->ops
[1]);
129 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
130 res_op
->ops
[0], res_op
->ops
[1]);
132 && CONSTANT_CLASS_P (tem
))
134 if (TREE_OVERFLOW_P (tem
))
135 tem
= drop_tree_overflow (tem
);
136 res_op
->set_value (tem
);
141 /* Canonicalize operand order. */
142 bool canonicalized
= false;
143 if (res_op
->code
.is_tree_code ()
144 && (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
145 || commutative_tree_code (res_op
->code
))
146 && tree_swap_operands_p (res_op
->ops
[0], res_op
->ops
[1]))
148 std::swap (res_op
->ops
[0], res_op
->ops
[1]);
149 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
)
150 res_op
->code
= swap_tree_comparison (res_op
->code
);
151 canonicalized
= true;
154 gimple_match_op
res_op2 (*res_op
);
155 if (gimple_simplify (&res_op2
, seq
, valueize
,
156 res_op
->code
, res_op
->type
,
157 res_op
->ops
[0], res_op
->ops
[1]))
163 return canonicalized
;
166 /* Helper that matches and simplifies the toplevel result from
167 a gimple_simplify run (where we don't want to build
168 a stmt in case it's used in in-place folding). Replaces
169 RES_OP with a simplified and/or canonicalized result and
170 returns whether any change was made. */
173 gimple_resimplify3 (gimple_seq
*seq
, gimple_match_op
*res_op
,
174 tree (*valueize
)(tree
))
176 if (constant_for_folding (res_op
->ops
[0])
177 && constant_for_folding (res_op
->ops
[1])
178 && constant_for_folding (res_op
->ops
[2]))
180 tree tem
= NULL_TREE
;
181 if (res_op
->code
.is_tree_code ())
182 tem
= fold_ternary
/*_to_constant*/ (res_op
->code
, res_op
->type
,
183 res_op
->ops
[0], res_op
->ops
[1],
186 tem
= fold_const_call (combined_fn (res_op
->code
), res_op
->type
,
187 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]);
189 && CONSTANT_CLASS_P (tem
))
191 if (TREE_OVERFLOW_P (tem
))
192 tem
= drop_tree_overflow (tem
);
193 res_op
->set_value (tem
);
198 /* Canonicalize operand order. */
199 bool canonicalized
= false;
200 if (res_op
->code
.is_tree_code ()
201 && commutative_ternary_tree_code (res_op
->code
)
202 && tree_swap_operands_p (res_op
->ops
[0], res_op
->ops
[1]))
204 std::swap (res_op
->ops
[0], res_op
->ops
[1]);
205 canonicalized
= true;
208 gimple_match_op
res_op2 (*res_op
);
209 if (gimple_simplify (&res_op2
, seq
, valueize
,
210 res_op
->code
, res_op
->type
,
211 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2]))
217 return canonicalized
;
220 /* Helper that matches and simplifies the toplevel result from
221 a gimple_simplify run (where we don't want to build
222 a stmt in case it's used in in-place folding). Replaces
223 RES_OP with a simplified and/or canonicalized result and
224 returns whether any change was made. */
227 gimple_resimplify4 (gimple_seq
*seq
, gimple_match_op
*res_op
,
228 tree (*valueize
)(tree
))
230 /* No constant folding is defined for four-operand functions. */
232 gimple_match_op
res_op2 (*res_op
);
233 if (gimple_simplify (&res_op2
, seq
, valueize
,
234 res_op
->code
, res_op
->type
,
235 res_op
->ops
[0], res_op
->ops
[1], res_op
->ops
[2],
245 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
246 build a GENERIC tree for that expression and update RES_OP accordingly. */
249 maybe_build_generic_op (gimple_match_op
*res_op
)
251 tree_code code
= (tree_code
) res_op
->code
;
256 case VIEW_CONVERT_EXPR
:
257 res_op
->set_value (build1 (code
, res_op
->type
, res_op
->ops
[0]));
260 res_op
->set_value (build3 (code
, res_op
->type
, res_op
->ops
[0],
261 res_op
->ops
[1], res_op
->ops
[2]));
267 tree (*mprts_hook
) (gimple_match_op
*);
269 /* Try to build RES_OP, which is known to be a call to FN. Return null
270 if the target doesn't support the function. */
273 build_call_internal (internal_fn fn
, gimple_match_op
*res_op
)
275 if (direct_internal_fn_p (fn
))
277 tree_pair types
= direct_internal_fn_types (fn
, res_op
->type
,
279 if (!direct_internal_fn_supported_p (fn
, types
, OPTIMIZE_FOR_BOTH
))
282 return gimple_build_call_internal (fn
, res_op
->num_ops
,
283 res_op
->op_or_null (0),
284 res_op
->op_or_null (1),
285 res_op
->op_or_null (2),
286 res_op
->op_or_null (3));
289 /* Push the exploded expression described by RES_OP as a statement to
290 SEQ if necessary and return a gimple value denoting the value of the
291 expression. If RES is not NULL then the result will be always RES
292 and even gimple values are pushed to SEQ. */
295 maybe_push_res_to_seq (gimple_match_op
*res_op
, gimple_seq
*seq
, tree res
)
297 tree
*ops
= res_op
->ops
;
298 unsigned num_ops
= res_op
->num_ops
;
300 if (res_op
->code
.is_tree_code ())
303 && gimple_simplified_result_is_gimple_val (res_op
))
307 tree tem
= mprts_hook (res_op
);
316 /* Play safe and do not allow abnormals to be mentioned in
317 newly created statements. */
318 for (unsigned int i
= 0; i
< num_ops
; ++i
)
319 if (TREE_CODE (ops
[i
]) == SSA_NAME
320 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
]))
323 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
324 for (unsigned int i
= 0; i
< 2; ++i
)
325 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
326 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
)))
329 if (res_op
->code
.is_tree_code ())
333 if (gimple_in_ssa_p (cfun
))
334 res
= make_ssa_name (res_op
->type
);
336 res
= create_tmp_reg (res_op
->type
);
338 maybe_build_generic_op (res_op
);
339 gimple
*new_stmt
= gimple_build_assign (res
, res_op
->code
,
340 res_op
->op_or_null (0),
341 res_op
->op_or_null (1),
342 res_op
->op_or_null (2));
343 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
348 gcc_assert (num_ops
!= 0);
349 combined_fn fn
= res_op
->code
;
350 gcall
*new_stmt
= NULL
;
351 if (internal_fn_p (fn
))
353 /* Generate the given function if we can. */
354 internal_fn ifn
= as_internal_fn (fn
);
355 new_stmt
= build_call_internal (ifn
, res_op
);
361 /* Find the function we want to call. */
362 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
366 /* We can't and should not emit calls to non-const functions. */
367 if (!(flags_from_decl_or_type (decl
) & ECF_CONST
))
370 new_stmt
= gimple_build_call (decl
, num_ops
,
371 res_op
->op_or_null (0),
372 res_op
->op_or_null (1),
373 res_op
->op_or_null (2),
374 res_op
->op_or_null (3));
378 if (gimple_in_ssa_p (cfun
))
379 res
= make_ssa_name (res_op
->type
);
381 res
= create_tmp_reg (res_op
->type
);
383 gimple_call_set_lhs (new_stmt
, res
);
384 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
390 /* Public API overloads follow for operation being tree_code or
391 built_in_function and for one to three operands or arguments.
392 They return NULL_TREE if nothing could be simplified or
393 the resulting simplified value with parts pushed to SEQ.
394 If SEQ is NULL then if the simplification needs to create
395 new stmts it will fail. If VALUEIZE is non-NULL then all
396 SSA names will be valueized using that hook prior to
397 applying simplifications. */
402 gimple_simplify (enum tree_code code
, tree type
,
404 gimple_seq
*seq
, tree (*valueize
)(tree
))
406 if (constant_for_folding (op0
))
408 tree res
= const_unop (code
, type
, op0
);
410 && CONSTANT_CLASS_P (res
))
414 gimple_match_op res_op
;
415 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
))
417 return maybe_push_res_to_seq (&res_op
, seq
);
423 gimple_simplify (enum tree_code code
, tree type
,
425 gimple_seq
*seq
, tree (*valueize
)(tree
))
427 if (constant_for_folding (op0
) && constant_for_folding (op1
))
429 tree res
= const_binop (code
, type
, op0
, op1
);
431 && CONSTANT_CLASS_P (res
))
435 /* Canonicalize operand order both for matching and fallback stmt
437 if ((commutative_tree_code (code
)
438 || TREE_CODE_CLASS (code
) == tcc_comparison
)
439 && tree_swap_operands_p (op0
, op1
))
441 std::swap (op0
, op1
);
442 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
443 code
= swap_tree_comparison (code
);
446 gimple_match_op res_op
;
447 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
))
449 return maybe_push_res_to_seq (&res_op
, seq
);
455 gimple_simplify (enum tree_code code
, tree type
,
456 tree op0
, tree op1
, tree op2
,
457 gimple_seq
*seq
, tree (*valueize
)(tree
))
459 if (constant_for_folding (op0
) && constant_for_folding (op1
)
460 && constant_for_folding (op2
))
462 tree res
= fold_ternary
/*_to_constant */ (code
, type
, op0
, op1
, op2
);
464 && CONSTANT_CLASS_P (res
))
468 /* Canonicalize operand order both for matching and fallback stmt
470 if (commutative_ternary_tree_code (code
)
471 && tree_swap_operands_p (op0
, op1
))
472 std::swap (op0
, op1
);
474 gimple_match_op res_op
;
475 if (!gimple_simplify (&res_op
, seq
, valueize
, code
, type
, op0
, op1
, op2
))
477 return maybe_push_res_to_seq (&res_op
, seq
);
480 /* Builtin or internal function with one argument. */
483 gimple_simplify (combined_fn fn
, tree type
,
485 gimple_seq
*seq
, tree (*valueize
)(tree
))
487 if (constant_for_folding (arg0
))
489 tree res
= fold_const_call (fn
, type
, arg0
);
490 if (res
&& CONSTANT_CLASS_P (res
))
494 gimple_match_op res_op
;
495 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
))
497 return maybe_push_res_to_seq (&res_op
, seq
);
500 /* Builtin or internal function with two arguments. */
503 gimple_simplify (combined_fn fn
, tree type
,
504 tree arg0
, tree arg1
,
505 gimple_seq
*seq
, tree (*valueize
)(tree
))
507 if (constant_for_folding (arg0
)
508 && constant_for_folding (arg1
))
510 tree res
= fold_const_call (fn
, type
, arg0
, arg1
);
511 if (res
&& CONSTANT_CLASS_P (res
))
515 gimple_match_op res_op
;
516 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
))
518 return maybe_push_res_to_seq (&res_op
, seq
);
521 /* Builtin or internal function with three arguments. */
524 gimple_simplify (combined_fn fn
, tree type
,
525 tree arg0
, tree arg1
, tree arg2
,
526 gimple_seq
*seq
, tree (*valueize
)(tree
))
528 if (constant_for_folding (arg0
)
529 && constant_for_folding (arg1
)
530 && constant_for_folding (arg2
))
532 tree res
= fold_const_call (fn
, type
, arg0
, arg1
, arg2
);
533 if (res
&& CONSTANT_CLASS_P (res
))
537 gimple_match_op res_op
;
538 if (!gimple_simplify (&res_op
, seq
, valueize
, fn
, type
, arg0
, arg1
, arg2
))
540 return maybe_push_res_to_seq (&res_op
, seq
);
543 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
544 VALUEIZED to true if valueization changed OP. */
547 do_valueize (tree op
, tree (*valueize
)(tree
), bool &valueized
)
549 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
551 tree tem
= valueize (op
);
552 if (tem
&& tem
!= op
)
561 /* The main STMT based simplification entry. It is used by the fold_stmt
562 and the fold_stmt_to_constant APIs. */
565 gimple_simplify (gimple
*stmt
, gimple_match_op
*res_op
, gimple_seq
*seq
,
566 tree (*valueize
)(tree
), tree (*top_valueize
)(tree
))
568 switch (gimple_code (stmt
))
572 enum tree_code code
= gimple_assign_rhs_code (stmt
);
573 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
574 switch (gimple_assign_rhs_class (stmt
))
576 case GIMPLE_SINGLE_RHS
:
577 if (code
== REALPART_EXPR
578 || code
== IMAGPART_EXPR
579 || code
== VIEW_CONVERT_EXPR
)
581 tree op0
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
582 bool valueized
= false;
583 op0
= do_valueize (op0
, top_valueize
, valueized
);
584 res_op
->set_op (code
, type
, op0
);
585 return (gimple_resimplify1 (seq
, res_op
, valueize
)
588 else if (code
== BIT_FIELD_REF
)
590 tree rhs1
= gimple_assign_rhs1 (stmt
);
591 tree op0
= TREE_OPERAND (rhs1
, 0);
592 bool valueized
= false;
593 op0
= do_valueize (op0
, top_valueize
, valueized
);
594 res_op
->set_op (code
, type
, op0
,
595 TREE_OPERAND (rhs1
, 1),
596 TREE_OPERAND (rhs1
, 2));
597 return (gimple_resimplify3 (seq
, res_op
, valueize
)
600 else if (code
== SSA_NAME
603 tree op0
= gimple_assign_rhs1 (stmt
);
604 tree valueized
= top_valueize (op0
);
605 if (!valueized
|| op0
== valueized
)
607 res_op
->set_op (TREE_CODE (op0
), type
, valueized
);
611 case GIMPLE_UNARY_RHS
:
613 tree rhs1
= gimple_assign_rhs1 (stmt
);
614 bool valueized
= false;
615 rhs1
= do_valueize (rhs1
, top_valueize
, valueized
);
616 res_op
->set_op (code
, type
, rhs1
);
617 return (gimple_resimplify1 (seq
, res_op
, valueize
)
620 case GIMPLE_BINARY_RHS
:
622 tree rhs1
= gimple_assign_rhs1 (stmt
);
623 tree rhs2
= gimple_assign_rhs2 (stmt
);
624 bool valueized
= false;
625 rhs1
= do_valueize (rhs1
, top_valueize
, valueized
);
626 rhs2
= do_valueize (rhs2
, top_valueize
, valueized
);
627 res_op
->set_op (code
, type
, rhs1
, rhs2
);
628 return (gimple_resimplify2 (seq
, res_op
, valueize
)
631 case GIMPLE_TERNARY_RHS
:
633 bool valueized
= false;
634 tree rhs1
= gimple_assign_rhs1 (stmt
);
635 /* If this is a [VEC_]COND_EXPR first try to simplify an
636 embedded GENERIC condition. */
637 if (code
== COND_EXPR
638 || code
== VEC_COND_EXPR
)
640 if (COMPARISON_CLASS_P (rhs1
))
642 tree lhs
= TREE_OPERAND (rhs1
, 0);
643 tree rhs
= TREE_OPERAND (rhs1
, 1);
644 lhs
= do_valueize (lhs
, top_valueize
, valueized
);
645 rhs
= do_valueize (rhs
, top_valueize
, valueized
);
646 gimple_match_op
res_op2 (TREE_CODE (rhs1
),
647 TREE_TYPE (rhs1
), lhs
, rhs
);
648 if ((gimple_resimplify2 (seq
, &res_op2
, valueize
)
650 && res_op2
.code
.is_tree_code ())
653 if (TREE_CODE_CLASS ((enum tree_code
) res_op2
.code
)
655 rhs1
= build2 (res_op2
.code
, TREE_TYPE (rhs1
),
656 res_op2
.ops
[0], res_op2
.ops
[1]);
657 else if (res_op2
.code
== SSA_NAME
658 || res_op2
.code
== INTEGER_CST
659 || res_op2
.code
== VECTOR_CST
)
660 rhs1
= res_op2
.ops
[0];
666 tree rhs2
= gimple_assign_rhs2 (stmt
);
667 tree rhs3
= gimple_assign_rhs3 (stmt
);
668 rhs1
= do_valueize (rhs1
, top_valueize
, valueized
);
669 rhs2
= do_valueize (rhs2
, top_valueize
, valueized
);
670 rhs3
= do_valueize (rhs3
, top_valueize
, valueized
);
671 res_op
->set_op (code
, type
, rhs1
, rhs2
, rhs3
);
672 return (gimple_resimplify3 (seq
, res_op
, valueize
)
682 /* ??? This way we can't simplify calls with side-effects. */
683 if (gimple_call_lhs (stmt
) != NULL_TREE
684 && gimple_call_num_args (stmt
) >= 1
685 && gimple_call_num_args (stmt
) <= 4)
687 bool valueized
= false;
689 if (gimple_call_internal_p (stmt
))
690 cfn
= as_combined_fn (gimple_call_internal_fn (stmt
));
693 tree fn
= gimple_call_fn (stmt
);
697 fn
= do_valueize (fn
, top_valueize
, valueized
);
698 if (TREE_CODE (fn
) != ADDR_EXPR
699 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
702 tree decl
= TREE_OPERAND (fn
, 0);
703 if (DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_NORMAL
704 || !gimple_builtin_call_types_compatible_p (stmt
, decl
))
707 cfn
= as_combined_fn (DECL_FUNCTION_CODE (decl
));
710 unsigned int num_args
= gimple_call_num_args (stmt
);
711 res_op
->set_op (cfn
, TREE_TYPE (gimple_call_lhs (stmt
)), num_args
);
712 for (unsigned i
= 0; i
< num_args
; ++i
)
714 tree arg
= gimple_call_arg (stmt
, i
);
715 res_op
->ops
[i
] = do_valueize (arg
, top_valueize
, valueized
);
720 return (gimple_resimplify1 (seq
, res_op
, valueize
)
723 return (gimple_resimplify2 (seq
, res_op
, valueize
)
726 return (gimple_resimplify3 (seq
, res_op
, valueize
)
729 return (gimple_resimplify4 (seq
, res_op
, valueize
)
739 tree lhs
= gimple_cond_lhs (stmt
);
740 tree rhs
= gimple_cond_rhs (stmt
);
741 bool valueized
= false;
742 lhs
= do_valueize (lhs
, top_valueize
, valueized
);
743 rhs
= do_valueize (rhs
, top_valueize
, valueized
);
744 res_op
->set_op (gimple_cond_code (stmt
), boolean_type_node
, lhs
, rhs
);
745 return (gimple_resimplify2 (seq
, res_op
, valueize
)
757 /* Helper for the autogenerated code, valueize OP. */
760 do_valueize (tree (*valueize
)(tree
), tree op
)
762 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
764 tree tem
= valueize (op
);
771 /* Helper for the autogenerated code, get at the definition of NAME when
772 VALUEIZE allows that. */
775 get_def (tree (*valueize
)(tree
), tree name
)
777 if (valueize
&& ! valueize (name
))
779 return SSA_NAME_DEF_STMT (name
);
782 /* Routine to determine if the types T1 and T2 are effectively
783 the same for GIMPLE. If T1 or T2 is not a type, the test
784 applies to their TREE_TYPE. */
787 types_match (tree t1
, tree t2
)
794 return types_compatible_p (t1
, t2
);
797 /* Return if T has a single use. For GIMPLE, we also allow any
798 non-SSA_NAME (ie constants) and zero uses to cope with uses
799 that aren't linked up yet. */
804 return TREE_CODE (t
) != SSA_NAME
|| has_zero_uses (t
) || has_single_use (t
);
807 /* Return true if math operations should be canonicalized,
808 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
811 canonicalize_math_p ()
813 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_opt_math
) == 0;
816 /* Return true if math operations that are beneficial only after
817 vectorization should be canonicalized. */
820 canonicalize_math_after_vectorization_p ()
822 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) != 0;
825 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
826 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
827 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
828 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
829 will likely be exact, while exp (log (arg0) * arg1) might be not.
830 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
833 optimize_pow_to_exp (tree arg0
, tree arg1
)
835 gcc_assert (TREE_CODE (arg0
) == REAL_CST
);
836 if (!real_isinteger (TREE_REAL_CST_PTR (arg0
), TYPE_MODE (TREE_TYPE (arg0
))))
839 if (TREE_CODE (arg1
) != SSA_NAME
)
842 gimple
*def
= SSA_NAME_DEF_STMT (arg1
);
843 gphi
*phi
= dyn_cast
<gphi
*> (def
);
844 tree cst1
= NULL_TREE
;
845 enum tree_code code
= ERROR_MARK
;
848 if (!is_gimple_assign (def
))
850 code
= gimple_assign_rhs_code (def
);
859 if (TREE_CODE (gimple_assign_rhs1 (def
)) != SSA_NAME
860 || TREE_CODE (gimple_assign_rhs2 (def
)) != REAL_CST
)
863 cst1
= gimple_assign_rhs2 (def
);
865 phi
= dyn_cast
<gphi
*> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def
)));
870 tree cst2
= NULL_TREE
;
871 int n
= gimple_phi_num_args (phi
);
872 for (int i
= 0; i
< n
; i
++)
874 tree arg
= PHI_ARG_DEF (phi
, i
);
875 if (TREE_CODE (arg
) != REAL_CST
)
877 else if (cst2
== NULL_TREE
)
879 else if (!operand_equal_p (cst2
, arg
, 0))
884 cst2
= const_binop (code
, TREE_TYPE (cst2
), cst2
, cst1
);
886 && TREE_CODE (cst2
) == REAL_CST
887 && real_isinteger (TREE_REAL_CST_PTR (cst2
),
888 TYPE_MODE (TREE_TYPE (cst2
))))