/* Preamble and helpers for the autogenerated gimple-match.cc file.
   Copyright (C) 2014-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
39 #include "gimple-match.h"
40 #include "tree-pass.h"
41 #include "internal-fn.h"
42 #include "case-cfn-macros.h"
44 #include "optabs-tree.h"
48 #include "gimple-range.h"
49 #include "langhooks.h"
51 tree
do_valueize (tree
, tree (*)(tree
), bool &);
52 tree
do_valueize (tree (*)(tree
), tree
);
54 /* Helper for the autogenerated code, get at the definition of NAME when
55 VALUEIZE allows that. */
58 get_def (tree (*valueize
)(tree
), tree name
)
60 if (valueize
&& ! valueize (name
))
62 return SSA_NAME_DEF_STMT (name
);
65 /* Routine to determine if the types T1 and T2 are effectively
66 the same for GIMPLE. If T1 or T2 is not a type, the test
67 applies to their TREE_TYPE. */
70 types_match (tree t1
, tree t2
)
77 return types_compatible_p (t1
, t2
);
80 /* Return if T has a single use. For GIMPLE, we also allow any
81 non-SSA_NAME (ie constants) and zero uses to cope with uses
82 that aren't linked up yet. */
85 single_use (const_tree
) ATTRIBUTE_PURE
;
88 single_use (const_tree t
)
90 if (TREE_CODE (t
) != SSA_NAME
)
93 /* Inline return has_zero_uses (t) || has_single_use (t); */
94 const ssa_use_operand_t
*const head
= &(SSA_NAME_IMM_USE_NODE (t
));
95 const ssa_use_operand_t
*ptr
;
98 for (ptr
= head
->next
; ptr
!= head
; ptr
= ptr
->next
)
99 if (USE_STMT(ptr
) && !is_gimple_debug (USE_STMT (ptr
)))
108 /* Return true if math operations should be canonicalized,
109 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
112 canonicalize_math_p ()
114 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_opt_math
) == 0;
117 /* Return true if math operations that are beneficial only after
118 vectorization should be canonicalized. */
121 canonicalize_math_after_vectorization_p ()
123 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) != 0;
126 /* Return true if we can still perform transformations that may introduce
127 vector operations that are not supported by the target. Vector lowering
128 normally handles those, but after that pass, it becomes unsafe. */
131 optimize_vectors_before_lowering_p ()
133 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) == 0;
136 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
137 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
138 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
139 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
140 will likely be exact, while exp (log (arg0) * arg1) might be not.
141 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
144 optimize_pow_to_exp (tree arg0
, tree arg1
)
146 gcc_assert (TREE_CODE (arg0
) == REAL_CST
);
147 if (!real_isinteger (TREE_REAL_CST_PTR (arg0
), TYPE_MODE (TREE_TYPE (arg0
))))
150 if (TREE_CODE (arg1
) != SSA_NAME
)
153 gimple
*def
= SSA_NAME_DEF_STMT (arg1
);
154 gphi
*phi
= dyn_cast
<gphi
*> (def
);
155 tree cst1
= NULL_TREE
;
156 enum tree_code code
= ERROR_MARK
;
159 if (!is_gimple_assign (def
))
161 code
= gimple_assign_rhs_code (def
);
170 if (TREE_CODE (gimple_assign_rhs1 (def
)) != SSA_NAME
171 || TREE_CODE (gimple_assign_rhs2 (def
)) != REAL_CST
)
174 cst1
= gimple_assign_rhs2 (def
);
176 phi
= dyn_cast
<gphi
*> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def
)));
181 tree cst2
= NULL_TREE
;
182 int n
= gimple_phi_num_args (phi
);
183 for (int i
= 0; i
< n
; i
++)
185 tree arg
= PHI_ARG_DEF (phi
, i
);
186 if (TREE_CODE (arg
) != REAL_CST
)
188 else if (cst2
== NULL_TREE
)
190 else if (!operand_equal_p (cst2
, arg
, 0))
195 cst2
= const_binop (code
, TREE_TYPE (cst2
), cst2
, cst1
);
197 && TREE_CODE (cst2
) == REAL_CST
198 && real_isinteger (TREE_REAL_CST_PTR (cst2
),
199 TYPE_MODE (TREE_TYPE (cst2
))))
204 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
205 is another division can be optimized. Don't optimize if INNER_DIV
206 is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
209 optimize_successive_divisions_p (tree divisor
, tree inner_div
)
211 if (!gimple_in_ssa_p (cfun
))
214 imm_use_iterator imm_iter
;
216 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, inner_div
)
218 gimple
*use_stmt
= USE_STMT (use_p
);
219 if (!is_gimple_assign (use_stmt
)
220 || gimple_assign_rhs_code (use_stmt
) != TRUNC_MOD_EXPR
221 || !operand_equal_p (gimple_assign_rhs2 (use_stmt
), divisor
, 0))