/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "optabs-tree.h"

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */

static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}

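/* Illustrative sketch (kept as a comment, not compiled): the predicate
   accepts any CONSTANT_CLASS_P node and the address of a string literal;
   the operand names below are made up for the example.

     constant_for_folding (build_int_cst (integer_type_node, 42)); // true
     constant_for_folding (some_ssa_name);                         // false
     constant_for_folding (addr_of_string_cst);                    // true,
       assuming addr_of_string_cst is an ADDR_EXPR of a STRING_CST.  */
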
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify1 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_unop (*res_code, type, res_ops[0]);
      else
        tem = fold_const_call (combined_fn (*res_code), type, res_ops[0]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify2 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_binop (*res_code, type, res_ops[0], res_ops[1]);
      else
        tem = fold_const_call (combined_fn (*res_code), type,
                               res_ops[0], res_ops[1]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison
          || commutative_tree_code (*res_code))
      && tree_swap_operands_p (res_ops[0], res_ops[1]))
    {
      std::swap (res_ops[0], res_ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison)
        *res_code = swap_tree_comparison (*res_code);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0], res_ops[1]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}

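/* Worked example (illustrative only): if a match produced the raw result
   { GT_EXPR, 5, b_2 }, the canonicalization above swaps the operands and
   the comparison code, yielding { LT_EXPR, b_2, 5 }, and gimple_resimplify2
   reports a change even when no further pattern applies.  The SSA name b_2
   and the constant are made up for the example.  */
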
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify3 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = fold_ternary/*_to_constant*/ (*res_code, type, res_ops[0],
                                            res_ops[1], res_ops[2]);
      else
        tem = fold_const_call (combined_fn (*res_code), type,
                               res_ops[0], res_ops[1], res_ops[2]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1]))
    {
      std::swap (res_ops[0], res_ops[1]);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type,
                       res_ops[0], res_ops[1], res_ops[2]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}

/* If expressions with CODE appear as a single RHS in GIMPLE, build a
   GENERIC tree for that expression and store it in OPS[0].  */

void
maybe_build_generic_op (enum tree_code code, tree type, tree *ops)
{
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      ops[0] = build1 (code, type, ops[0]);
      break;
    case BIT_FIELD_REF:
      ops[0] = build3 (code, type, ops[0], ops[1], ops[2]);
      ops[1] = ops[2] = NULL_TREE;
      break;
    default:;
    }
}

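/* For illustration: VIEW_CONVERT_EXPR and BIT_FIELD_REF are only valid as
   a single RHS in GIMPLE, so before emitting an assignment the exploded
   operands are folded back into one GENERIC tree, e.g.

     tree ops[3] = { val, bitsize, bitpos };   // made-up operand names
     maybe_build_generic_op (BIT_FIELD_REF, type, ops);
     // now ops[0] is BIT_FIELD_REF <val, bitsize, bitpos> and ops[1],
     // ops[2] are NULL_TREE.  */
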
tree (*mprts_hook) (code_helper, tree, tree *);

/* Try to build a call to FN with return type TYPE and the NARGS
   arguments given in OPS.  Return null if the target doesn't support
   the function.  */

static gcall *
build_call_internal (internal_fn fn, tree type, unsigned int nargs, tree *ops)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, type, ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
        return NULL;
    }
  return gimple_build_call_internal (fn, nargs, ops[0], ops[1], ops[2]);
}

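/* Usage sketch (illustrative, kept as a comment): emit a direct internal
   function call only when the target supports it, e.g. for IFN_SQRT:

     tree ops[3] = { op0, NULL_TREE, NULL_TREE };   // made-up operand
     gcall *call = build_call_internal (IFN_SQRT, type, 1, ops);
     if (call)  // non-null only if the target can expand IFN_SQRT on TYPE
       gimple_seq_add_stmt_without_update (seq, call);  */
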
/* Push the exploded expression described by RCODE, TYPE and OPS
   as a statement to SEQ if necessary and return a gimple value
   denoting the value of the expression.  If RES is not NULL
   then the result will be always RES and even gimple values are
   pushed to SEQ.  */

tree
maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
                       gimple_seq *seq, tree res)
{
  if (rcode.is_tree_code ())
    {
      if (!res
          && gimple_simplified_result_is_gimple_val (rcode, ops))
        return ops[0];
      if (mprts_hook)
        {
          tree tem = mprts_hook (rcode, type, ops);
          if (tem)
            return tem;
        }
      if (!seq)
        return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
          || (ops[1]
              && TREE_CODE (ops[1]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
          || (ops[2]
              && TREE_CODE (ops[2]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]))
          || (COMPARISON_CLASS_P (ops[0])
              && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
                   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
                                                                     0)))
                  || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
                      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
                                                                        1))))))
        return NULL_TREE;
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type);
          else
            res = create_tmp_reg (type);
        }
      maybe_build_generic_op (rcode, type, ops);
      gimple *new_stmt = gimple_build_assign (res, rcode,
                                              ops[0], ops[1], ops[2]);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      if (!seq)
        return NULL_TREE;
      combined_fn fn = rcode;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      unsigned nargs;
      for (nargs = 0; nargs < 3; ++nargs)
        {
          if (!ops[nargs])
            break;
          if (TREE_CODE (ops[nargs]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[nargs]))
            return NULL_TREE;
        }
      gcc_assert (nargs != 0);
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
        {
          /* Generate the given function if we can.  */
          internal_fn ifn = as_internal_fn (fn);
          new_stmt = build_call_internal (ifn, type, nargs, ops);
          if (!new_stmt)
            return NULL_TREE;
        }
      else
        {
          /* Find the function we want to call.  */
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          if (!decl)
            return NULL_TREE;

          /* We can't and should not emit calls to non-const functions.  */
          if (!(flags_from_decl_or_type (decl) & ECF_CONST))
            return NULL_TREE;

          new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
        }
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type);
          else
            res = create_tmp_reg (type);
        }
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}

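/* Usage sketch (illustrative, kept as a comment): materializing a
   simplified expression as statements.  The operand names are made up.

     code_helper rcode = PLUS_EXPR;
     tree ops[3] = { a_1, b_2, NULL_TREE };
     gimple_seq seq = NULL;
     tree val = maybe_push_res_to_seq (rcode, type, ops, &seq);
     // VAL is a gimple value defined by a statement appended to SEQ, or
     // NULL_TREE if the expression could not be materialized safely
     // (for example operands occurring in abnormal PHIs).  */
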
/* Public API overloads follow for the operation being a tree_code or a
   built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified, or the resulting
   simplified value with any new statements pushed to SEQ.
   If SEQ is NULL the simplification fails whenever it would need to
   create new stmts.  If VALUEIZE is non-NULL then all SSA names will
   be valueized using that hook prior to applying simplifications.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode = code;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  code_helper rcode = code;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

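/* Usage sketch (illustrative, kept as a comment): a caller asking for a
   simplified form of a binary expression, allowing new statements to be
   emitted into SEQ.  The operand name is made up.

     gimple_seq seq = NULL;
     tree val = gimple_simplify (BIT_AND_EXPR, boolean_type_node,
                                 x_1, x_1, &seq, NULL);
     // For X & X this yields X itself without needing new statements;
     // passing NULL for SEQ instead makes simplifications fail that would
     // have to build intermediate statements.  */
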
tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  code_helper rcode = code;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin or internal function with one argument.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (fn, type, arg0);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode = fn;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize, fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin or internal function with two arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (fn, type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode = fn;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize, fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin or internal function with three arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (fn, type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode = fn;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

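/* Usage sketch (illustrative, kept as a comment): the combined_fn overloads
   above accept both BUILT_IN_* and IFN_* codes through one entry point.
   The argument name is made up.

     gimple_seq seq = NULL;
     tree val = gimple_simplify (CFN_BUILT_IN_SQRT, double_type_node,
                                 arg_1, &seq, NULL);
     // VAL is NULL_TREE if no pattern applied, otherwise the simplified
     // value with any required statements appended to SEQ.  */
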
/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}

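/* For illustration (comment only): given a VALUEIZE callback that maps
   the SSA name x_1 to the constant 3, the made-up snippet

     bool valueized = false;
     op = do_valueize (x_1, valueize, valueized);

   leaves OP equal to 3 and VALUEIZED set to true; for non-SSA operands
   or an identity mapping VALUEIZED is left untouched.  */
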
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt,
                 code_helper *rcode, tree *ops,
                 gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = TREE_OPERAND (rhs1, 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                ops[1] = TREE_OPERAND (rhs1, 1);
                ops[2] = TREE_OPERAND (rhs1, 2);
                return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == SSA_NAME
                     && top_valueize)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                tree valueized = top_valueize (op0);
                if (!valueized || op0 == valueized)
                  return false;
                ops[0] = valueized;
                *rcode = TREE_CODE (op0);
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_TERNARY_RHS:
            {
              bool valueized = false;
              tree rhs1 = gimple_assign_rhs1 (stmt);
              /* If this is a [VEC_]COND_EXPR first try to simplify an
                 embedded GENERIC condition.  */
              if (code == COND_EXPR
                  || code == VEC_COND_EXPR)
                {
                  if (COMPARISON_CLASS_P (rhs1))
                    {
                      tree lhs = TREE_OPERAND (rhs1, 0);
                      tree rhs = TREE_OPERAND (rhs1, 1);
                      lhs = do_valueize (lhs, top_valueize, valueized);
                      rhs = do_valueize (rhs, top_valueize, valueized);
                      code_helper rcode2 = TREE_CODE (rhs1);
                      tree ops2[3] = {};
                      ops2[0] = lhs;
                      ops2[1] = rhs;
                      if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
                                               ops2, valueize)
                           || valueized)
                          && rcode2.is_tree_code ())
                        {
                          valueized = true;
                          if (TREE_CODE_CLASS ((enum tree_code) rcode2)
                              == tcc_comparison)
                            rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
                                           ops2[0], ops2[1]);
                          else if (rcode2 == SSA_NAME
                                   || rcode2 == INTEGER_CST
                                   || rcode2 == VECTOR_CST)
                            rhs1 = ops2[0];
                          else
                            valueized = false;
                        }
                    }
                }
              tree rhs2 = gimple_assign_rhs2 (stmt);
              tree rhs3 = gimple_assign_rhs3 (stmt);
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              rhs3 = do_valueize (rhs3, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              ops[2] = rhs3;
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          default:
            gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_CALL:
      /* ???  This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 3)
        {
          bool valueized = false;
          if (gimple_call_internal_p (stmt))
            *rcode = as_combined_fn (gimple_call_internal_fn (stmt));
          else
            {
              tree fn = gimple_call_fn (stmt);
              if (!fn)
                return false;

              fn = do_valueize (fn, top_valueize, valueized);
              if (TREE_CODE (fn) != ADDR_EXPR
                  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
                return false;

              tree decl = TREE_OPERAND (fn, 0);
              if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
                  || !gimple_builtin_call_types_compatible_p (stmt, decl))
                return false;

              *rcode = as_combined_fn (DECL_FUNCTION_CODE (decl));
            }

          tree type = TREE_TYPE (gimple_call_lhs (stmt));
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              ops[i] = do_valueize (arg, top_valueize, valueized);
            }
          switch (gimple_call_num_args (stmt))
            {
            case 1:
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 2:
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 3:
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            default:
              gcc_unreachable ();
            }
        }
      break;

    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        bool valueized = false;
        lhs = do_valueize (lhs, top_valueize, valueized);
        rhs = do_valueize (rhs, top_valueize, valueized);
        *rcode = gimple_cond_code (stmt);
        ops[0] = lhs;
        ops[1] = rhs;
        return (gimple_resimplify2 (seq, rcode,
                                    boolean_type_node, ops, valueize)
                || valueized);
      }

    default:
      break;
    }

  return false;
}

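/* Usage sketch (illustrative, kept as a comment): this is the entry the
   statement folders drive.  A caller such as fold_stmt roughly does

     code_helper rcode;
     tree ops[3] = {};
     gimple_seq seq = NULL;
     if (gimple_simplify (stmt, &rcode, ops, &seq, valueize, valueize))
       // replace STMT's RHS (or condition) with RCODE/OPS, inserting
       // any statements accumulated in SEQ before it.
       ...;

   where VALUEIZE may be e.g. no_follow_ssa_edges when looking through
   SSA definitions is not wanted.  */
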
/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem)
        return tem;
    }
  return op;
}

/* Helper for the autogenerated code, get at the definition of NAME when
   VALUEIZE allows that.  */

inline gimple *
get_def (tree (*valueize)(tree), tree name)
{
  if (valueize && ! valueize (name))
    return NULL;
  return SSA_NAME_DEF_STMT (name);
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}

/* Return whether T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (ie constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}

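/* For illustration: single_use (integer_zero_node) is true because
   constants are never rejected, and an SSA name with no uses recorded
   yet is also accepted; only an SSA name with two or more recorded uses
   makes a single-use guard fail.  */
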
/* Return true if math operations should be canonicalized,
   e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */

static inline bool
canonicalize_math_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}

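/* For illustration: match.pd patterns that only canonicalize (such as
   turning sqrt(sqrt(x)) into pow(x, 0.25), mentioned above) wrap their
   result in a guard of the shape

     (if (canonicalize_math_p ())
      ...)

   so they stop firing once the PROP_gimple_opt_math property is set.  */
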
/* Return true if math operations that are beneficial only after
   vectorization should be canonicalized.  */

static inline bool
canonicalize_math_after_vectorization_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
}

/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
        return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          break;
        default:
          return true;
        }
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
          || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
        return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
        return true;
    }

  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
        continue;
      else if (cst2 == NULL_TREE)
        cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
        return true;
    }

  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
                         TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
